Test Report: Docker_Linux_crio_arm64 19651

                    
f000a69778791892f7d89fef6358d7150d12a198:2024-09-16:36236

Tests failed (35/229)

Order  Failed test  Duration (s)
31 TestAddons/serial/GCPAuth/Namespaces 0
33 TestAddons/parallel/Registry 14.54
34 TestAddons/parallel/Ingress 2.54
36 TestAddons/parallel/MetricsServer 354.78
39 TestAddons/parallel/CSI 362.56
42 TestAddons/parallel/LocalPath 0
46 TestCertOptions 41.78
68 TestFunctional/serial/KubeContext 2.89
69 TestFunctional/serial/KubectlGetPods 2.86
82 TestFunctional/serial/ComponentHealth 2.49
85 TestFunctional/serial/InvalidService 0
88 TestFunctional/parallel/DashboardCmd 6.95
95 TestFunctional/parallel/ServiceCmdConnect 2.46
97 TestFunctional/parallel/PersistentVolumeClaim 101.9
107 TestFunctional/parallel/NodeLabels 4.24
116 TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup 0
117 TestFunctional/parallel/TunnelCmd/serial/AccessDirect 92.32
122 TestFunctional/parallel/ServiceCmd/DeployApp 0
123 TestFunctional/parallel/ServiceCmd/List 0.34
124 TestFunctional/parallel/ServiceCmd/JSONOutput 0.33
125 TestFunctional/parallel/ServiceCmd/HTTPS 0.33
126 TestFunctional/parallel/ServiceCmd/Format 0.33
127 TestFunctional/parallel/ServiceCmd/URL 0.33
131 TestFunctional/parallel/MountCmd/any-port 3.36
162 TestMultiControlPlane/serial/NodeLabels 3.15
167 TestMultiControlPlane/serial/RestartSecondaryNode 29.54
170 TestMultiControlPlane/serial/DeleteSecondaryNode 17.31
173 TestMultiControlPlane/serial/RestartCluster 90.15
229 TestMultiNode/serial/MultiNodeLabels 2.97
233 TestMultiNode/serial/StartAfterStop 13.1
235 TestMultiNode/serial/DeleteNode 9.69
237 TestMultiNode/serial/RestartMultiNode 64.13
243 TestPreload 23.63
251 TestKubernetesUpgrade 358.72
361 TestNetworkPlugins/group/custom-flannel/NetCatPod 7200.083
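
The failure logs that follow repeatedly show one signature: fork/exec /usr/local/bin/kubectl: exec format error. A quick, hedged way to gauge how many of the 35 failures carry that signature — assuming the full report has been saved locally (test-report.log is a hypothetical filename) — is:

	# Count occurrences of the kubectl exec-format signature in a saved copy of this report.
	grep -c 'exec format error' test-report.log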
TestAddons/serial/GCPAuth/Namespaces (0s)

=== RUN   TestAddons/serial/GCPAuth/Namespaces
addons_test.go:656: (dbg) Run:  kubectl --context addons-936355 create ns new-namespace
addons_test.go:656: (dbg) Non-zero exit: kubectl --context addons-936355 create ns new-namespace: fork/exec /usr/local/bin/kubectl: exec format error (507.121µs)
addons_test.go:658: kubectl --context addons-936355 create ns new-namespace failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/serial/GCPAuth/Namespaces (0.00s)
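
On Linux, "exec format error" from fork/exec means the kernel refused to run the binary at all; on this arm64 (aarch64) runner, the most plausible explanation is that the kubectl installed at /usr/local/bin/kubectl was built for a different architecture (e.g. amd64). A minimal triage sketch using standard tools — an assumption about the fix path, not part of the original run:

	uname -m                      # expect aarch64 on this arm64 runner
	file /usr/local/bin/kubectl   # an x86-64 ELF here would explain the exec format error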

TestAddons/parallel/Registry (14.54s)

=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

=== CONT  TestAddons/parallel/Registry
addons_test.go:332: registry stabilized in 29.960164ms
addons_test.go:334: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
addons_test.go:334: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 6.003637668s
addons_test.go:337: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
addons_test.go:337: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 5.003948041s
addons_test.go:342: (dbg) Run:  kubectl --context addons-936355 delete po -l run=registry-test --now
addons_test.go:342: (dbg) Non-zero exit: kubectl --context addons-936355 delete po -l run=registry-test --now: fork/exec /usr/local/bin/kubectl: exec format error (568.962µs)
addons_test.go:344: pre-cleanup kubectl --context addons-936355 delete po -l run=registry-test --now failed: fork/exec /usr/local/bin/kubectl: exec format error (not a problem)
addons_test.go:347: (dbg) Run:  kubectl --context addons-936355 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"
addons_test.go:347: (dbg) Non-zero exit: kubectl --context addons-936355 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": fork/exec /usr/local/bin/kubectl: exec format error (224.863µs)
addons_test.go:349: failed to hit registry.kube-system.svc.cluster.local. args "kubectl --context addons-936355 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c \"wget --spider -S http://registry.kube-system.svc.cluster.local\"" failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:353: expected curl response be "HTTP/1.1 200", but got **
addons_test.go:361: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 ip
2024/09/16 10:39:04 [DEBUG] GET http://192.168.49.2:5000
addons_test.go:390: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 addons disable registry --alsologtostderr -v=1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Registry]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-936355
helpers_test.go:235: (dbg) docker inspect addons-936355:

-- stdout --
	[
	    {
	        "Id": "990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22",
	        "Created": "2024-09-16T10:35:26.829229764Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1385081,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:35:26.979651686Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hostname",
	        "HostsPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hosts",
	        "LogPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22-json.log",
	        "Name": "/addons-936355",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-936355:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-936355",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/merged",
	                "UpperDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/diff",
	                "WorkDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-936355",
	                "Source": "/var/lib/docker/volumes/addons-936355/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-936355",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-936355",
	                "name.minikube.sigs.k8s.io": "addons-936355",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "c25ad70fe630d4f698b2829da4e56bff2645b3ff549ca5302800a382e6bdd028",
	            "SandboxKey": "/var/run/docker/netns/c25ad70fe630",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34603"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34604"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34607"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34605"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34606"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-936355": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "5d73edaa3366fd0ba0b4bacad454985b0bd272fda9938fc527483e0046d7c748",
	                    "EndpointID": "cf4cd538acb5e979612a79c60d294fba1f05c9fef1a1bec978977fcb945819c4",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-936355",
	                        "990f1d352091"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-936355 -n addons-936355
helpers_test.go:244: <<< TestAddons/parallel/Registry FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Registry]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 logs -n 25: (1.63709775s)
helpers_test.go:252: TestAddons/parallel/Registry logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-084128              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| start   | -o=json --download-only              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-605096              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | download-docker-880503               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p download-docker-880503            | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | binary-mirror-652159                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:40363               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-652159              | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| addons  | enable dashboard -p                  | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| start   | -p addons-936355 --wait=true         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:38 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| ip      | addons-936355 ip                     | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	| addons  | addons-936355 addons disable         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:35:01
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:35:01.861741 1384589 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:35:01.861923 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.861959 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:35:01.861972 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.862230 1384589 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:35:01.862730 1384589 out.go:352] Setting JSON to false
	I0916 10:35:01.863665 1384589 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37047,"bootTime":1726445855,"procs":155,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:35:01.863739 1384589 start.go:139] virtualization:  
	I0916 10:35:01.866923 1384589 out.go:177] * [addons-936355] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:35:01.870432 1384589 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:35:01.870537 1384589 notify.go:220] Checking for updates...
	I0916 10:35:01.875880 1384589 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:35:01.878650 1384589 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:01.881242 1384589 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:35:01.883862 1384589 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:35:01.886520 1384589 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:35:01.889353 1384589 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:35:01.930300 1384589 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:35:01.930438 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:01.986400 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:01.976217774 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:01.986524 1384589 docker.go:318] overlay module found
	I0916 10:35:01.989262 1384589 out.go:177] * Using the docker driver based on user configuration
	I0916 10:35:01.991996 1384589 start.go:297] selected driver: docker
	I0916 10:35:01.992025 1384589 start.go:901] validating driver "docker" against <nil>
	I0916 10:35:01.992040 1384589 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:35:01.992727 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:02.058953 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:02.049617339 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:02.059182 1384589 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:35:02.059420 1384589 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:35:02.062017 1384589 out.go:177] * Using Docker driver with root privileges
	I0916 10:35:02.064628 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:02.064789 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:02.064804 1384589 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:35:02.064885 1384589 start.go:340] cluster config:
	{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:02.069567 1384589 out.go:177] * Starting "addons-936355" primary control-plane node in "addons-936355" cluster
	I0916 10:35:02.072130 1384589 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:35:02.074827 1384589 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:35:02.077314 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:02.077371 1384589 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:35:02.077383 1384589 cache.go:56] Caching tarball of preloaded images
	I0916 10:35:02.077398 1384589 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:35:02.077476 1384589 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:35:02.077486 1384589 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:35:02.077848 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:02.077880 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json: {Name:mkd05c2b0dbaa1cc700db22c74ae8fbcc0c53329 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:02.092106 1384589 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:35:02.092232 1384589 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:35:02.092252 1384589 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:35:02.092257 1384589 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:35:02.092264 1384589 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:35:02.092269 1384589 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:35:19.265886 1384589 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:35:19.265926 1384589 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:35:19.265955 1384589 start.go:360] acquireMachinesLock for addons-936355: {Name:mk780e867f4084d469fbad7a4968b7ad3d556c69 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:35:19.266489 1384589 start.go:364] duration metric: took 511.962µs to acquireMachinesLock for "addons-936355"
	I0916 10:35:19.266531 1384589 start.go:93] Provisioning new machine with config: &{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:19.266610 1384589 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:35:19.269716 1384589 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:35:19.269968 1384589 start.go:159] libmachine.API.Create for "addons-936355" (driver="docker")
	I0916 10:35:19.270003 1384589 client.go:168] LocalClient.Create starting
	I0916 10:35:19.270125 1384589 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:35:20.065665 1384589 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:35:20.505791 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:35:20.520423 1384589 cli_runner.go:211] docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:35:20.520525 1384589 network_create.go:284] running [docker network inspect addons-936355] to gather additional debugging logs...
	I0916 10:35:20.520546 1384589 cli_runner.go:164] Run: docker network inspect addons-936355
	W0916 10:35:20.534395 1384589 cli_runner.go:211] docker network inspect addons-936355 returned with exit code 1
	I0916 10:35:20.534432 1384589 network_create.go:287] error running [docker network inspect addons-936355]: docker network inspect addons-936355: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-936355 not found
	I0916 10:35:20.534447 1384589 network_create.go:289] output of [docker network inspect addons-936355]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-936355 not found
	
	** /stderr **
	I0916 10:35:20.534555 1384589 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:20.550802 1384589 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001826a70}
	I0916 10:35:20.550849 1384589 network_create.go:124] attempt to create docker network addons-936355 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:35:20.550909 1384589 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-936355 addons-936355
	I0916 10:35:20.622324 1384589 network_create.go:108] docker network addons-936355 192.168.49.0/24 created
	I0916 10:35:20.622359 1384589 kic.go:121] calculated static IP "192.168.49.2" for the "addons-936355" container
	I0916 10:35:20.622443 1384589 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:35:20.636891 1384589 cli_runner.go:164] Run: docker volume create addons-936355 --label name.minikube.sigs.k8s.io=addons-936355 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:35:20.653249 1384589 oci.go:103] Successfully created a docker volume addons-936355
	I0916 10:35:20.653357 1384589 cli_runner.go:164] Run: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:35:22.737442 1384589 cli_runner.go:217] Completed: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (2.08404207s)
	I0916 10:35:22.737471 1384589 oci.go:107] Successfully prepared a docker volume addons-936355
	I0916 10:35:22.737499 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:22.737519 1384589 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:35:22.737588 1384589 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:35:26.763089 1384589 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.025452617s)
	I0916 10:35:26.763126 1384589 kic.go:203] duration metric: took 4.025604753s to extract preloaded images to volume ...
	W0916 10:35:26.763258 1384589 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:35:26.763378 1384589 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:35:26.814712 1384589 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-936355 --name addons-936355 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-936355 --network addons-936355 --ip 192.168.49.2 --volume addons-936355:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:35:27.165000 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Running}}
	I0916 10:35:27.189076 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:27.216370 1384589 cli_runner.go:164] Run: docker exec addons-936355 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:35:27.281467 1384589 oci.go:144] the created container "addons-936355" has a running status.
	I0916 10:35:27.281502 1384589 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa...
	I0916 10:35:28.804386 1384589 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:35:28.826599 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.843564 1384589 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:35:28.843591 1384589 kic_runner.go:114] Args: [docker exec --privileged addons-936355 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:35:28.892577 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.913158 1384589 machine.go:93] provisionDockerMachine start ...
	I0916 10:35:28.913258 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:28.931596 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:28.931893 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:28.931910 1384589 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:35:29.068030 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.068064 1384589 ubuntu.go:169] provisioning hostname "addons-936355"
	I0916 10:35:29.068142 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.085139 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.085383 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.085399 1384589 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-936355 && echo "addons-936355" | sudo tee /etc/hostname
	I0916 10:35:29.232508 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.232589 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.248944 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.249190 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.249214 1384589 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-936355' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-936355/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-936355' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:35:29.385206 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:35:29.385233 1384589 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:35:29.385263 1384589 ubuntu.go:177] setting up certificates
	I0916 10:35:29.385275 1384589 provision.go:84] configureAuth start
	I0916 10:35:29.385357 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:29.401844 1384589 provision.go:143] copyHostCerts
	I0916 10:35:29.401930 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:35:29.402060 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:35:29.402129 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:35:29.402184 1384589 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.addons-936355 san=[127.0.0.1 192.168.49.2 addons-936355 localhost minikube]
	I0916 10:35:29.844064 1384589 provision.go:177] copyRemoteCerts
	I0916 10:35:29.844139 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:35:29.844181 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.860341 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:29.957424 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:35:29.982494 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:35:30.020527 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:35:30.083993 1384589 provision.go:87] duration metric: took 698.682489ms to configureAuth
	I0916 10:35:30.084118 1384589 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:35:30.084480 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:30.084746 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.108015 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:30.108273 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:30.108291 1384589 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:35:30.350713 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:35:30.350736 1384589 machine.go:96] duration metric: took 1.437556677s to provisionDockerMachine
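
The drop-in written just above exports CRIO_MINIKUBE_OPTIONS so CRI-O treats the service CIDR as an insecure registry. A quick sketch for verifying it took effect after the restart, assuming (as in the kicbase image) that crio.service sources /etc/sysconfig/crio.minikube as an environment file:

    # Confirm the drop-in exists and that the running crio picked up the flag.
    sudo cat /etc/sysconfig/crio.minikube
    systemctl cat crio.service | grep -i 'EnvironmentFile\|ExecStart'
    ps -o args= -C crio | grep -- '--insecure-registry 10.96.0.0/12'
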
	I0916 10:35:30.350754 1384589 client.go:171] duration metric: took 11.080732872s to LocalClient.Create
	I0916 10:35:30.350775 1384589 start.go:167] duration metric: took 11.080807939s to libmachine.API.Create "addons-936355"
	I0916 10:35:30.350784 1384589 start.go:293] postStartSetup for "addons-936355" (driver="docker")
	I0916 10:35:30.350795 1384589 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:35:30.350871 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:35:30.350928 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.367694 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.471627 1384589 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:35:30.475048 1384589 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:35:30.475083 1384589 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:35:30.475094 1384589 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:35:30.475101 1384589 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:35:30.475111 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:35:30.475191 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:35:30.475215 1384589 start.go:296] duration metric: took 124.425275ms for postStartSetup
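
postStartSetup scans two host-side directories; anything placed under ~/.minikube/files is mirrored into the node's filesystem at the same relative path on the next start. A minimal sketch (the motd path is just an illustrative example):

    # Files under $MINIKUBE_HOME/.minikube/files/<path> are synced to /<path> in the node.
    mkdir -p ~/.minikube/files/etc
    echo "provisioned by minikube filesync" > ~/.minikube/files/etc/motd
    # Re-running `minikube start` applies it inside the node.
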
	I0916 10:35:30.475537 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.492884 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:30.493230 1384589 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:35:30.493280 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.510291 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.601939 1384589 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
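
The two df pipelines above are how minikube samples disk pressure on /var: the first prints the used-percentage column of the data row, the second the available space in whole gigabytes. Run standalone they look like this:

    df -h /var | awk 'NR==2{print $5}'   # Use% column, e.g. "23%"
    df -BG /var | awk 'NR==2{print $4}'  # Avail column in 1 GiB blocks, e.g. "61G"
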
	I0916 10:35:30.606785 1384589 start.go:128] duration metric: took 11.340152497s to createHost
	I0916 10:35:30.606809 1384589 start.go:83] releasing machines lock for "addons-936355", held for 11.340303023s
	I0916 10:35:30.606879 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.623200 1384589 ssh_runner.go:195] Run: cat /version.json
	I0916 10:35:30.623223 1384589 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:35:30.623263 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.623284 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.644076 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.644213 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.736416 1384589 ssh_runner.go:195] Run: systemctl --version
	I0916 10:35:30.866086 1384589 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:35:31.012168 1384589 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:35:31.016985 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.040299 1384589 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:35:31.040383 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.079331 1384589 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
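
Disabling the stock CNI configs is done by renaming them with a .mk_disabled suffix, as the two find/-exec mv runs above show, so the kindnet config installed later can take precedence. A sketch of the reverse operation, should the stock configs ever need restoring by hand:

    # Restore any CNI configs that minikube parked with the .mk_disabled suffix.
    for f in /etc/cni/net.d/*.mk_disabled; do
      [ -e "$f" ] || continue            # skip if the glob matched nothing
      sudo mv "$f" "${f%.mk_disabled}"
    done
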
	I0916 10:35:31.079357 1384589 start.go:495] detecting cgroup driver to use...
	I0916 10:35:31.079391 1384589 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:35:31.079448 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:35:31.097860 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:35:31.111311 1384589 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:35:31.111396 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:35:31.126864 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:35:31.142983 1384589 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:35:31.237602 1384589 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:35:31.329055 1384589 docker.go:233] disabling docker service ...
	I0916 10:35:31.329150 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:35:31.350134 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:35:31.362931 1384589 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:35:31.458212 1384589 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:35:31.563725 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
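
Stopping, disabling, and masking both cri-docker and docker ensures nothing races CRI-O for the container runtime socket. If that ever needs undoing on a node, the inverse sequence is a sketch like:

    # Inverse of the stop/disable/mask sequence above (docker shown; cri-docker is analogous).
    sudo systemctl unmask docker.service docker.socket
    sudo systemctl enable --now docker.service
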
	I0916 10:35:31.575461 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:35:31.592172 1384589 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:35:31.592265 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.602336 1384589 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:35:31.602418 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.612396 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.622391 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.632203 1384589 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:35:31.642063 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.651889 1384589 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.669408 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.683307 1384589 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:35:31.692220 1384589 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
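
The two probes above cover a pair of kubeadm preflight requirements: bridged traffic must be visible to iptables, and IPv4 forwarding must be on. Verifying both directly:

    sysctl net.bridge.bridge-nf-call-iptables   # expect "= 1" (needs br_netfilter loaded)
    cat /proc/sys/net/ipv4/ip_forward           # expect "1" after the echo above
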
	I0916 10:35:31.702005 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:31.781982 1384589 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:35:31.897438 1384589 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:35:31.897567 1384589 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:35:31.901379 1384589 start.go:563] Will wait 60s for crictl version
	I0916 10:35:31.901491 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:35:31.904735 1384589 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:35:31.941675 1384589 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:35:31.941854 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:31.981298 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:32.027709 1384589 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:35:32.030371 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:32.045684 1384589 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:35:32.049353 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
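
The /etc/hosts edit above is an idempotent upsert: strip any stale host.minikube.internal line, append the fresh mapping, and copy the result back with sudo. Generalized as a small sketch (the helper name is hypothetical; values are the ones from the log):

    # Idempotently (re)pin a hosts entry; mirrors the grep -v / echo / cp pattern above.
    # Note: dots in the name match loosely in the regex, which is fine for this use.
    upsert_hosts() {  # usage: upsert_hosts 192.168.49.1 host.minikube.internal
      { grep -v $'\t'"$2"'$' /etc/hosts; printf '%s\t%s\n' "$1" "$2"; } > "/tmp/h.$$"
      sudo cp "/tmp/h.$$" /etc/hosts && rm -f "/tmp/h.$$"
    }
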
	I0916 10:35:32.060434 1384589 kubeadm.go:883] updating cluster {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:35:32.060562 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:32.060622 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.132274 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.132300 1384589 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:35:32.132361 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.168136 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.168159 1384589 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:35:32.168167 1384589 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:35:32.168274 1384589 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=addons-936355 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:35:32.168366 1384589 ssh_runner.go:195] Run: crio config
	I0916 10:35:32.227191 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:32.227213 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:32.227223 1384589 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:35:32.227267 1384589 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-936355 NodeName:addons-936355 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:35:32.227445 1384589 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "addons-936355"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
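
The generated config stitches InitConfiguration, ClusterConfiguration, KubeletConfiguration, and KubeProxyConfiguration into one multi-document YAML, written to the node as kubeadm.yaml.new a few lines below and copied into place before init. Recent kubeadm releases (v1.26+) can lint such a file directly; a sketch, assuming the binaries path from this run:

    # Static validation of the multi-document config before (or after) init.
    sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config validate \
      --config /var/tmp/minikube/kubeadm.yaml
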
	
	I0916 10:35:32.227523 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:35:32.236628 1384589 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:35:32.236739 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:35:32.245582 1384589 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:35:32.264058 1384589 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:35:32.283541 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2151 bytes)
	I0916 10:35:32.302607 1384589 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:35:32.306351 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.317408 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:32.409376 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:32.423337 1384589 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355 for IP: 192.168.49.2
	I0916 10:35:32.423401 1384589 certs.go:194] generating shared ca certs ...
	I0916 10:35:32.423434 1384589 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:32.423586 1384589 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:35:34.185450 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt ...
	I0916 10:35:34.185484 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt: {Name:mk7933e16cdd72038659b0287d05eb0c475b810e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.185680 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key ...
	I0916 10:35:34.185693 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key: {Name:mkb7482a30b71122d1b4fb2bf43b1e757c702edc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.186220 1384589 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:35:34.459909 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt ...
	I0916 10:35:34.459947 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt: {Name:mke012c32e9f14a06899ff2aaaf49a35a27f11b6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460629 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key ...
	I0916 10:35:34.460645 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key: {Name:mk5d1994088ad6012c806fe8f78deff99aef1b4a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460749 1384589 certs.go:256] generating profile certs ...
	I0916 10:35:34.460814 1384589 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key
	I0916 10:35:34.460832 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt with IP's: []
	I0916 10:35:34.818752 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt ...
	I0916 10:35:34.818789 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: {Name:mk0c01900c6bb90e11943bb255479c9c46b42cdc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.819458 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key ...
	I0916 10:35:34.819477 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key: {Name:mk6a80bf44231e37c26b15b78c1573c745bc94c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.820007 1384589 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8
	I0916 10:35:34.820055 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:35:35.136595 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 ...
	I0916 10:35:35.136634 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8: {Name:mkefb9e5abb2f41ae336f1dfb5f1a2e66afaeb9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.136842 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 ...
	I0916 10:35:35.136857 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8: {Name:mkea4ee147dec7cfd16ab920313dbb27db2e74f5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.137417 1384589 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt
	I0916 10:35:35.137519 1384589 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key
	I0916 10:35:35.137576 1384589 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key
	I0916 10:35:35.137599 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt with IP's: []
	I0916 10:35:35.880558 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt ...
	I0916 10:35:35.880594 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt: {Name:mke368773a6b2b93aed6ad850fe8fd0d4a737afa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881334 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key ...
	I0916 10:35:35.881354 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key: {Name:mk0b7d6a78a045adf50310a69acebceca87fff88 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881575 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:35:35.881620 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:35:35.881652 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:35:35.881681 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:35:35.882348 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:35:35.913124 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:35:35.940837 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:35:35.966731 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:35:35.992292 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:35:36.018704 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:35:36.045022 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:35:36.070444 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:35:36.097278 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:35:36.122467 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:35:36.141948 1384589 ssh_runner.go:195] Run: openssl version
	I0916 10:35:36.147681 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:35:36.157655 1384589 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161783 1384589 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161849 1384589 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.169303 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
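
The `openssl x509 -hash` run above computes the subject hash that OpenSSL's CA-directory lookup expects, and the symlink names the cert `<hash>.0` under /etc/ssl/certs. The same trust check by hand, as a sketch:

    # The subject hash decides the symlink name OpenSSL resolves at verify time.
    h=$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)
    ls -l "/etc/ssl/certs/${h}.0"     # -> minikubeCA.pem (b5213941.0 in this run)
    openssl verify -CApath /etc/ssl/certs /usr/share/ca-certificates/minikubeCA.pem
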
	I0916 10:35:36.183583 1384589 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:35:36.188459 1384589 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:35:36.188535 1384589 kubeadm.go:392] StartCluster: {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:36.188663 1384589 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:35:36.188762 1384589 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:35:36.238853 1384589 cri.go:89] found id: ""
	I0916 10:35:36.238944 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:35:36.247955 1384589 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:35:36.256986 1384589 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:35:36.257089 1384589 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:35:36.266246 1384589 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:35:36.266266 1384589 kubeadm.go:157] found existing configuration files:
	
	I0916 10:35:36.266339 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:35:36.274963 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:35:36.275044 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:35:36.283444 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:35:36.292355 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:35:36.292450 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:35:36.300873 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.309855 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:35:36.309929 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.318718 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:35:36.328008 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:35:36.328097 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:35:36.336437 1384589 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:35:36.378930 1384589 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:35:36.379124 1384589 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:35:36.400406 1384589 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:35:36.400480 1384589 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:35:36.400522 1384589 kubeadm.go:310] OS: Linux
	I0916 10:35:36.400571 1384589 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:35:36.400622 1384589 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:35:36.400687 1384589 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:35:36.400738 1384589 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:35:36.400790 1384589 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:35:36.400843 1384589 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:35:36.400891 1384589 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:35:36.400941 1384589 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:35:36.400990 1384589 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:35:36.460868 1384589 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:35:36.460983 1384589 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:35:36.461077 1384589 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:35:36.469524 1384589 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:35:36.478231 1384589 out.go:235]   - Generating certificates and keys ...
	I0916 10:35:36.478421 1384589 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:35:36.478536 1384589 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:35:37.031514 1384589 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:35:37.927948 1384589 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:35:38.481156 1384589 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:35:38.950500 1384589 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:35:40.037164 1384589 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:35:40.037694 1384589 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.393078 1384589 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:35:40.393223 1384589 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.639316 1384589 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:35:41.086019 1384589 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:35:41.417060 1384589 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:35:41.417146 1384589 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:35:41.829000 1384589 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:35:42.186509 1384589 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:35:43.056769 1384589 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:35:43.944133 1384589 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:35:44.069436 1384589 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:35:44.070260 1384589 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:35:44.073516 1384589 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:35:44.076353 1384589 out.go:235]   - Booting up control plane ...
	I0916 10:35:44.076466 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:35:44.076546 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:35:44.077309 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:35:44.088522 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:35:44.095329 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:35:44.095390 1384589 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:35:44.198308 1384589 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:35:44.198428 1384589 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:35:45.200220 1384589 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00196488s
	I0916 10:35:45.200324 1384589 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:35:51.202352 1384589 kubeadm.go:310] [api-check] The API server is healthy after 6.002166951s
	I0916 10:35:51.223941 1384589 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:35:51.239556 1384589 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:35:51.267029 1384589 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:35:51.267231 1384589 kubeadm.go:310] [mark-control-plane] Marking the node addons-936355 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:35:51.278589 1384589 kubeadm.go:310] [bootstrap-token] Using token: 08qv26.fux33djnogp684b3
	I0916 10:35:51.281486 1384589 out.go:235]   - Configuring RBAC rules ...
	I0916 10:35:51.281633 1384589 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:35:51.288736 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:35:51.298974 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:35:51.303116 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:35:51.306944 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:35:51.312530 1384589 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:35:51.609739 1384589 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:35:52.042589 1384589 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:35:52.609454 1384589 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:35:52.610559 1384589 kubeadm.go:310] 
	I0916 10:35:52.610639 1384589 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:35:52.610651 1384589 kubeadm.go:310] 
	I0916 10:35:52.610728 1384589 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:35:52.610737 1384589 kubeadm.go:310] 
	I0916 10:35:52.610762 1384589 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:35:52.610825 1384589 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:35:52.610877 1384589 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:35:52.610886 1384589 kubeadm.go:310] 
	I0916 10:35:52.610939 1384589 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:35:52.610947 1384589 kubeadm.go:310] 
	I0916 10:35:52.610994 1384589 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:35:52.611003 1384589 kubeadm.go:310] 
	I0916 10:35:52.611054 1384589 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:35:52.611131 1384589 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:35:52.611205 1384589 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:35:52.611213 1384589 kubeadm.go:310] 
	I0916 10:35:52.611296 1384589 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:35:52.611376 1384589 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:35:52.611384 1384589 kubeadm.go:310] 
	I0916 10:35:52.611467 1384589 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.611571 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:35:52.611602 1384589 kubeadm.go:310] 	--control-plane 
	I0916 10:35:52.611610 1384589 kubeadm.go:310] 
	I0916 10:35:52.611694 1384589 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:35:52.611701 1384589 kubeadm.go:310] 
	I0916 10:35:52.611782 1384589 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.612037 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 10:35:52.615159 1384589 kubeadm.go:310] W0916 10:35:36.375856    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615456 1384589 kubeadm.go:310] W0916 10:35:36.376640    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615672 1384589 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:35:52.615783 1384589 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
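
The bootstrap token printed in the join commands above expires after the ttl: 24h0m0s set in the InitConfiguration. If a node needs to join later, a fresh token and join command can be minted on the control plane; a sketch using the binaries path from this run:

    # Re-issue a join command with a new 24h bootstrap token.
    sudo /var/lib/minikube/binaries/v1.31.1/kubeadm token create --print-join-command
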
	I0916 10:35:52.615802 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:52.615810 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:52.618721 1384589 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:35:52.621394 1384589 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:35:52.625462 1384589 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:35:52.625484 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:35:52.644461 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:35:52.919005 1384589 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:35:52.919065 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:52.919130 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-936355 minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-936355 minikube.k8s.io/primary=true
	I0916 10:35:52.934021 1384589 ops.go:34] apiserver oom_adj: -16
	I0916 10:35:53.058693 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:53.559565 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.058855 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.558709 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.059014 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.559273 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.058909 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.559492 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.645989 1384589 kubeadm.go:1113] duration metric: took 3.7269816s to wait for elevateKubeSystemPrivileges
	I0916 10:35:56.646081 1384589 kubeadm.go:394] duration metric: took 20.457571781s to StartCluster
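
elevateKubeSystemPrivileges spent the 3.7269816s above polling `kubectl get sa default` roughly twice a second (the 10:35:53 through 10:35:56 lines) until the default ServiceAccount existed, since pods cannot be admitted before it does. The same wait, as a standalone sketch:

    # Block until the default ServiceAccount appears (mirrors the polling above).
    until sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default \
          --kubeconfig=/var/lib/minikube/kubeconfig >/dev/null 2>&1; do
      sleep 0.5
    done
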
	I0916 10:35:56.646115 1384589 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.646272 1384589 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:56.646729 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.647006 1384589 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:56.647218 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.647256 1384589 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
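
The toEnable map above is the resolved addon set for this profile; each true entry fans out into the "Setting addon" lines that follow. Individual addons can be toggled the same way from the CLI, for example:

    # Toggle a single addon on the same profile (names match the map keys above).
    minikube -p addons-936355 addons enable metrics-server
    minikube -p addons-936355 addons list    # show resolved on/off state
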
	I0916 10:35:56.647344 1384589 addons.go:69] Setting yakd=true in profile "addons-936355"
	I0916 10:35:56.647362 1384589 addons.go:234] Setting addon yakd=true in "addons-936355"
	I0916 10:35:56.647386 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.647853 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.647019 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:35:56.648343 1384589 addons.go:69] Setting inspektor-gadget=true in profile "addons-936355"
	I0916 10:35:56.648358 1384589 addons.go:69] Setting metrics-server=true in profile "addons-936355"
	I0916 10:35:56.648364 1384589 addons.go:69] Setting cloud-spanner=true in profile "addons-936355"
	I0916 10:35:56.648372 1384589 addons.go:234] Setting addon cloud-spanner=true in "addons-936355"
	I0916 10:35:56.648375 1384589 addons.go:234] Setting addon metrics-server=true in "addons-936355"
	I0916 10:35:56.648397 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648398 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648856 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648883 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.651521 1384589 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-936355"
	I0916 10:35:56.651556 1384589 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-936355"
	I0916 10:35:56.651597 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.652064 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.654169 1384589 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-936355"
	I0916 10:35:56.654360 1384589 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:35:56.654505 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.656244 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.657047 1384589 addons.go:69] Setting registry=true in profile "addons-936355"
	I0916 10:35:56.657068 1384589 addons.go:234] Setting addon registry=true in "addons-936355"
	I0916 10:35:56.657100 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.657530 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.665356 1384589 addons.go:69] Setting storage-provisioner=true in profile "addons-936355"
	I0916 10:35:56.665392 1384589 addons.go:234] Setting addon storage-provisioner=true in "addons-936355"
	I0916 10:35:56.665428 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.665900 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656864 1384589 addons.go:69] Setting default-storageclass=true in profile "addons-936355"
	I0916 10:35:56.672310 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-936355"
	I0916 10:35:56.672744 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656877 1384589 addons.go:69] Setting gcp-auth=true in profile "addons-936355"
	I0916 10:35:56.677792 1384589 mustload.go:65] Loading cluster: addons-936355
	I0916 10:35:56.678032 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.678386 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.685741 1384589 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-936355"
	I0916 10:35:56.685780 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-936355"
	I0916 10:35:56.686170 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656881 1384589 addons.go:69] Setting ingress=true in profile "addons-936355"
	I0916 10:35:56.697863 1384589 addons.go:234] Setting addon ingress=true in "addons-936355"
	I0916 10:35:56.697916 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.698402 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656886 1384589 addons.go:69] Setting ingress-dns=true in profile "addons-936355"
	I0916 10:35:56.714403 1384589 addons.go:234] Setting addon ingress-dns=true in "addons-936355"
	I0916 10:35:56.714458 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.715038 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.718711 1384589 out.go:177] * Verifying Kubernetes components...
	I0916 10:35:56.721654 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:56.725191 1384589 addons.go:69] Setting volcano=true in profile "addons-936355"
	I0916 10:35:56.725221 1384589 addons.go:234] Setting addon volcano=true in "addons-936355"
	I0916 10:35:56.725264 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.725742 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.755780 1384589 addons.go:69] Setting volumesnapshots=true in profile "addons-936355"
	I0916 10:35:56.755830 1384589 addons.go:234] Setting addon volumesnapshots=true in "addons-936355"
	I0916 10:35:56.755891 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.756438 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648360 1384589 addons.go:234] Setting addon inspektor-gadget=true in "addons-936355"
	I0916 10:35:56.781338 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.781866 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
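	[annotation] The block above repeats one pattern per addon: record the flag in the profile ("Setting addon X=true"), check that the profile's machine exists, then confirm the machine container is running via docker inspect. A minimal sketch of that status check, reusing the exact docker CLI invocation the log shows (the helper name is illustrative, not minikube source):

	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	// containerStatus mirrors the repeated
	// `docker container inspect --format={{.State.Status}}` calls above.
	func containerStatus(name string) (string, error) {
		out, err := exec.Command("docker", "container", "inspect",
			"--format", "{{.State.Status}}", name).Output()
		if err != nil {
			return "", fmt.Errorf("inspect %s: %w", name, err)
		}
		return strings.TrimSpace(string(out)), nil
	}

	func main() {
		status, err := containerStatus("addons-936355")
		if err != nil {
			panic(err)
		}
		fmt.Println("machine status:", status) // expect "running" before manifests are pushed
	}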
	I0916 10:35:56.795868 1384589 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:35:56.806961 1384589 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:35:56.813860 1384589 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:56.813885 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:35:56.813953 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.825055 1384589 addons.go:234] Setting addon default-storageclass=true in "addons-936355"
	I0916 10:35:56.825094 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.825522 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.844917 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:35:56.847733 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:56.847756 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:35:56.847823 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
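	[annotation] The "scp memory --> <path> (N bytes)" lines stream an in-memory manifest over SSH into the machine rather than copying a local file. A sketch of that shape, assuming golang.org/x/crypto/ssh and a sudo-tee write (the helper and the tee trick are assumptions for illustration, not minikube's ssh_runner; host key checking is disabled only because this is a throwaway test VM):

	package main

	import (
		"bytes"
		"fmt"
		"os"

		"golang.org/x/crypto/ssh"
	)

	// writeRemoteFile pushes data over SSH to remotePath on the machine.
	func writeRemoteFile(addr, user, keyPath, remotePath string, data []byte) error {
		key, err := os.ReadFile(keyPath)
		if err != nil {
			return err
		}
		signer, err := ssh.ParsePrivateKey(key)
		if err != nil {
			return err
		}
		client, err := ssh.Dial("tcp", addr, &ssh.ClientConfig{
			User:            user,
			Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
			HostKeyCallback: ssh.InsecureIgnoreHostKey(), // test environment only
		})
		if err != nil {
			return err
		}
		defer client.Close()

		sess, err := client.NewSession()
		if err != nil {
			return err
		}
		defer sess.Close()

		sess.Stdin = bytes.NewReader(data)
		// tee keeps the privileged write inside a single sudo invocation.
		return sess.Run(fmt.Sprintf("sudo tee %s >/dev/null", remotePath))
	}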
	I0916 10:35:56.855550 1384589 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:56.855573 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:35:56.855637 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.868185 1384589 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:35:56.870805 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:35:56.870832 1384589 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:35:56.870903 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.880834 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.883983 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:35:56.888274 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.892893 1384589 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:35:56.893194 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:56.893206 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:35:56.893271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.895536 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:35:56.895559 1384589 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:35:56.895631 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.932992 1384589 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-936355"
	I0916 10:35:56.933037 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.933461 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.975517 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:35:56.981731 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:35:57.008862 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:35:57.011867 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.012071 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:35:57.012271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.012572 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:35:57.018623 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	W0916 10:35:57.018876 1384589 out.go:270] ! Enabling 'volcano' returned an error: running callbacks: [volcano addon does not support crio]
	I0916 10:35:57.019207 1384589 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:35:57.026491 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:35:57.031787 1384589 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.031824 1384589 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:35:57.031905 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.035870 1384589 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:35:57.037432 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:35:57.040920 1384589 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:35:57.041029 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.041817 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:57.047002 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:35:57.047021 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:35:57.047081 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.039199 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:35:57.067112 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:35:57.067136 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:35:57.067221 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.077336 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.080728 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:35:57.083509 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:35:57.084922 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.092584 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:35:57.100918 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:35:57.102580 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.103637 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:35:57.103656 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:35:57.103715 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.120809 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.121658 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.165011 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.190914 1384589 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:35:57.195762 1384589 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:35:57.198447 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.198482 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:35:57.198559 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.237951 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.247913 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.261430 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.263688 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.268844 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.272259 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	W0916 10:35:57.289422 1384589 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:35:57.289665 1384589 retry.go:31] will retry after 343.76577ms: ssh: handshake failed: EOF
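	[annotation] The `retry.go:31] will retry after ...` line above is a backoff-and-retry wrapper around a flaky operation, here an SSH handshake that raced the sshd startup. A stdlib sketch of that pattern; minikube's actual helper and its jitter policy are not reproduced:

	package main

	import (
		"fmt"
		"time"
	)

	// retryWithBackoff retries fn, doubling the wait between attempts.
	func retryWithBackoff(attempts int, initial time.Duration, fn func() error) error {
		delay := initial
		var err error
		for i := 0; i < attempts; i++ {
			if err = fn(); err == nil {
				return nil
			}
			fmt.Printf("will retry after %v: %v\n", delay, err)
			time.Sleep(delay)
			delay *= 2 // exponential growth between attempts
		}
		return fmt.Errorf("all %d attempts failed: %w", attempts, err)
	}

	func main() {
		_ = retryWithBackoff(3, 300*time.Millisecond, func() error {
			return fmt.Errorf("ssh: handshake failed: EOF") // stand-in for the real dial
		})
	}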
	I0916 10:35:57.317769 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.327435 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:57.327622 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
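	[annotation] The pipeline above injects a `hosts { 192.168.49.1 host.minikube.internal }` block into the CoreDNS Corefile via kubectl get | sed | kubectl replace, so pods can resolve the host. A sketch of the same edit through client-go instead of sed (string surgery simplified; clientset construction omitted; this is an equivalent, not the code minikube runs):

	package addons

	import (
		"context"
		"strings"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	// injectHostRecord inserts the hosts block before the forward directive,
	// as the sed expression above does.
	func injectHostRecord(ctx context.Context, cs kubernetes.Interface) error {
		cm, err := cs.CoreV1().ConfigMaps("kube-system").Get(ctx, "coredns", metav1.GetOptions{})
		if err != nil {
			return err
		}
		hosts := "        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }\n"
		cm.Data["Corefile"] = strings.Replace(cm.Data["Corefile"],
			"        forward .", hosts+"        forward .", 1)
		_, err = cs.CoreV1().ConfigMaps("kube-system").Update(ctx, cm, metav1.UpdateOptions{})
		return err
	}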
	I0916 10:35:57.507011 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:57.508273 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:57.512529 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:35:57.512557 1384589 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:35:57.532805 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:57.544603 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:35:57.544626 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:35:57.554769 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:57.597359 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.683748 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:35:57.683782 1384589 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:35:57.706763 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.708832 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:35:57.708864 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:35:57.733074 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:35:57.733107 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:35:57.767880 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:35:57.767908 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:35:57.780746 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:35:57.780786 1384589 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:35:57.807404 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.850707 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:35:57.850745 1384589 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:35:57.887607 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:35:57.887636 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:35:57.954841 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:57.954878 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:35:57.957894 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:35:57.957918 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:35:57.990850 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:57.990882 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:35:58.040155 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.040193 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:35:58.078005 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:35:58.078038 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:35:58.084259 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:35:58.084302 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:35:58.131227 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:35:58.131253 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:35:58.132161 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:58.147419 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:58.178615 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.199520 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:35:58.199553 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:35:58.206840 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:35:58.206873 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:35:58.251350 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:35:58.251378 1384589 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:35:58.301781 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:35:58.301809 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:35:58.328155 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:35:58.328184 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:35:58.351423 1384589 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.351449 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:35:58.404154 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:35:58.404188 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:35:58.467023 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.468235 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:35:58.468257 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:35:58.517809 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:35:58.517836 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:35:58.529132 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:35:58.529162 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:35:58.607318 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.607345 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:35:58.620217 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:35:58.620264 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:35:58.671546 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.726776 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:35:58.726803 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:35:58.855138 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:35:58.855204 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:35:58.993338 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:35:58.993375 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:35:59.149795 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
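	[annotation] Each addon's manifests are batched into a single kubectl apply with repeated -f flags, as the Run line above shows for the eleven csi-hostpath-driver files. A sketch of composing that invocation (paths and the helper name are illustrative):

	package addons

	import (
		"os/exec"
	)

	// applyManifests runs one `sudo KUBECONFIG=... kubectl apply -f a.yaml -f b.yaml ...`
	// for all files belonging to a single addon.
	func applyManifests(kubectlPath, kubeconfig string, files []string) ([]byte, error) {
		args := []string{"KUBECONFIG=" + kubeconfig, kubectlPath, "apply"}
		for _, f := range files {
			args = append(args, "-f", f)
		}
		return exec.Command("sudo", args...).CombinedOutput()
	}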
	I0916 10:36:00.098293 1384589 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (2.77063538s)
	I0916 10:36:00.098468 1384589 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:36:00.098398 1384589 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (2.770933472s)
	I0916 10:36:00.099620 1384589 node_ready.go:35] waiting up to 6m0s for node "addons-936355" to be "Ready" ...
	I0916 10:36:00.683691 1384589 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-936355" context rescaled to 1 replicas
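	[annotation] "rescaled to 1 replicas" above pins the coredns deployment to a single replica on this one-node cluster. A client-go sketch of that scale change (hypothetical helper; error handling trimmed):

	package addons

	import (
		"context"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	// scaleCoreDNS sets the replica count through the deployment's scale subresource.
	func scaleCoreDNS(ctx context.Context, cs kubernetes.Interface, replicas int32) error {
		scale, err := cs.AppsV1().Deployments("kube-system").GetScale(ctx, "coredns", metav1.GetOptions{})
		if err != nil {
			return err
		}
		scale.Spec.Replicas = replicas
		_, err = cs.AppsV1().Deployments("kube-system").UpdateScale(ctx, "coredns", scale, metav1.UpdateOptions{})
		return err
	}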
	I0916 10:36:02.134513 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:03.099256 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (5.592195221s)
	I0916 10:36:03.099297 1384589 addons.go:475] Verifying addon ingress=true in "addons-936355"
	I0916 10:36:03.099513 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (5.591216064s)
	I0916 10:36:03.099584 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (5.56674984s)
	I0916 10:36:03.099618 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (5.544827293s)
	I0916 10:36:03.099645 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (5.502263542s)
	I0916 10:36:03.099882 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.393096916s)
	I0916 10:36:03.099983 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (5.292558502s)
	I0916 10:36:03.100117 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (4.967882873s)
	I0916 10:36:03.100138 1384589 addons.go:475] Verifying addon registry=true in "addons-936355"
	I0916 10:36:03.100642 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (4.953181668s)
	I0916 10:36:03.100670 1384589 addons.go:475] Verifying addon metrics-server=true in "addons-936355"
	I0916 10:36:03.100733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (4.922089802s)
	I0916 10:36:03.102943 1384589 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-936355 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:36:03.102961 1384589 out.go:177] * Verifying registry addon...
	I0916 10:36:03.103034 1384589 out.go:177] * Verifying ingress addon...
	I0916 10:36:03.105813 1384589 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:36:03.106800 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:36:03.137676 1384589 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:36:03.137755 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.140614 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:03.140698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
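	[annotation] Each `kapi.go:96] waiting for pod ...` line below is one iteration of a poll over a label selector, repeated until every matching pod leaves Pending. A sketch of that loop with client-go and wait.PollImmediate (names illustrative; minikube's own state reporting differs):

	package addons

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// waitForPods polls until all pods matching selector in ns are Running.
	func waitForPods(cs kubernetes.Interface, ns, selector string, timeout time.Duration) error {
		return wait.PollImmediate(500*time.Millisecond, timeout, func() (bool, error) {
			pods, err := cs.CoreV1().Pods(ns).List(context.Background(),
				metav1.ListOptions{LabelSelector: selector})
			if err != nil || len(pods.Items) == 0 {
				return false, nil // keep polling; transient errors are retried
			}
			for _, p := range pods.Items {
				if p.Status.Phase != corev1.PodRunning {
					return false, nil
				}
			}
			return true, nil
		})
	}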
	W0916 10:36:03.153289 1384589 out.go:270] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class local-path as default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "local-path": the object has been modified; please apply your changes to the latest version and try again]
	I0916 10:36:03.250335 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (4.783269551s)
	W0916 10:36:03.250417 1384589 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250450 1384589 retry.go:31] will retry after 275.497637ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250543 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (4.57892356s)
	I0916 10:36:03.461537 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (4.311696877s)
	I0916 10:36:03.461620 1384589 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:36:03.466201 1384589 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:36:03.469722 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:36:03.486422 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:03.486490 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:03.526121 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
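	[annotation] The failure retried above is an ordering race: the VolumeSnapshot CRDs and the VolumeSnapshotClass that instantiates them sit in the same apply, and the CR is rejected ("ensure CRDs are installed first") before the CRD is being served; the re-apply with --force succeeds once the CRDs exist, as seen a few lines below. One way to avoid the race entirely is to wait for the CRD to report Established before applying CRs; a sketch with the apiextensions client (not what minikube does here, which simply retries):

	package addons

	import (
		"context"
		"time"

		apiextv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
		apiextclient "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
	)

	// waitForCRD blocks until the named CRD has the Established condition.
	func waitForCRD(cs apiextclient.Interface, name string) error {
		return wait.PollImmediate(time.Second, 30*time.Second, func() (bool, error) {
			crd, err := cs.ApiextensionsV1().CustomResourceDefinitions().Get(
				context.Background(), name, metav1.GetOptions{})
			if err != nil {
				return false, nil // not created yet; keep polling
			}
			for _, cond := range crd.Status.Conditions {
				if cond.Type == apiextv1.Established && cond.Status == apiextv1.ConditionTrue {
					return true, nil
				}
			}
			return false, nil
		})
	}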
	I0916 10:36:03.615580 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:03.616763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.973974 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.110336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.111341 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.482735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.603445 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:04.611582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.612963 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.974584 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.112352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.113152 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.475349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.612975 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.617564 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.994295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.112783 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.113610 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.402733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.87656549s)
	I0916 10:36:06.474104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.604073 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:06.611947 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.613297 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.111053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.112244 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.247182 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:36:07.247343 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.269993 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.399328 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:36:07.424561 1384589 addons.go:234] Setting addon gcp-auth=true in "addons-936355"
	I0916 10:36:07.424615 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:36:07.425137 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:36:07.445430 1384589 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:36:07.445507 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.462936 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.473788 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.564092 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:36:07.566842 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:36:07.569433 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:36:07.569479 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:36:07.591162 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:36:07.591235 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:36:07.611011 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.612352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.614169 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.614230 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:36:07.634944 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.973644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.114938 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.115927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.289637 1384589 addons.go:475] Verifying addon gcp-auth=true in "addons-936355"
	I0916 10:36:08.292442 1384589 out.go:177] * Verifying gcp-auth addon...
	I0916 10:36:08.297073 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:36:08.311457 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:36:08.311536 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.473794 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.610857 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.611876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.801268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.973643 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.105583 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:09.110567 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.111022 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.300943 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.478291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.611071 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.612876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.801153 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.973766 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.118258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.119777 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.307205 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.473996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.611600 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.611698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.801229 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.974340 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.112014 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:11.116183 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.120476 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.301066 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.473420 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.610713 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.612423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.800270 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.973407 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.115791 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.116920 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.301411 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.473867 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.609770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.610662 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.801634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.973046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.110851 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.111134 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.300575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.473835 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.603219 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:13.610390 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.611574 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.801371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.973479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.112208 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.113533 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.300299 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.474139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.610046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.612561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.800653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.972848 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.110408 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.110932 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.300237 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.473707 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.603293 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:15.610246 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.611371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.800451 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.973710 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.110350 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.111259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.300830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.472823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.609912 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.610711 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.801005 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.973568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.110550 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.112172 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.301017 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.473847 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.603589 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:17.610593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.611441 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.800956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.974143 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.110263 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.111182 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.301212 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.610442 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.611436 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.800286 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.973687 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.110597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.111342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.301090 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.473269 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.609625 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.610850 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.800307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.974046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.103731 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:20.112214 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.113558 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.301265 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.473689 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.610324 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.611114 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.800597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.109533 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.111696 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.302328 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.473189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.610124 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.611262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.801275 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.973296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.111525 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.113002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.300321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.473211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.602936 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:22.610283 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.611107 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.800931 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.974004 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.109980 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.110973 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.301081 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.473035 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.610199 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.611296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.800268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.973666 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.109603 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.110778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.301295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.473680 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.609537 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.610685 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.800457 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.974147 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.103048 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:25.111012 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.111240 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.300767 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.473813 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.610908 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.611483 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.801271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.973399 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.109553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.111922 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.300892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.609476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.610465 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.800314 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.974947 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.104747 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:27.110466 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.113262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.302886 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.475127 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.610103 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.619742 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.801198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.974956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.115379 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.117659 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.300851 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.474546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.610341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.611106 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.800632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.973876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.109998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.111054 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.300629 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.473403 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.603802 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:29.610293 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.611053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.800316 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.975589 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.112209 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.112442 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.300936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.473757 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.610468 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.610927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.801173 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.974752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.111549 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.111768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.300752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.472954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.610456 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.611765 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.801083 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.103800 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:32.109737 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.111636 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.301104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.473774 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.610924 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.611190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.801482 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.974672 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.110188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.111271 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.301349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.473433 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.610409 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.610888 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.801627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.973881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.110134 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.110497 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.474295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.603135 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:34.610342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.611690 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.801258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.973555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.110766 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.111394 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.300970 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.473087 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.610115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.611008 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.800154 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.974082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.109881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.110992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.300326 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.473408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.604025 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:36.610440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.610869 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.801065 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.973323 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.109996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.111285 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.300895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.474211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.610044 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.610356 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.800660 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.110670 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.110901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.301861 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.473168 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.610218 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.611834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.800936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.975190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.103702 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:39.110476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.111170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.301227 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.473926 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.609710 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.611195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.800502 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.973582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.111455 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.111653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.300951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.473797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.610268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.611132 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.800770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.974250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.110735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.111970 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.300538 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.473964 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.603723 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:41.610292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.610627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.801470 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.974052 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.110959 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.112236 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.300960 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.473748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.612461 1384589 node_ready.go:49] node "addons-936355" has status "Ready":"True"
	I0916 10:36:42.612538 1384589 node_ready.go:38] duration metric: took 42.512890552s for node "addons-936355" to be "Ready" ...
	I0916 10:36:42.612563 1384589 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
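(Editorial note on the transition above: after 42.5s of polling, node "addons-936355" finally reports "Ready":"True" and the waiter switches to the system-critical pods. As a rough, hypothetical sketch of what such a node check looks like with client-go — minikube's actual node_ready.go helper may differ, and the package and function names below are invented for illustration:)

package readiness

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// nodeReady reports whether the named node currently has its Ready
// condition set to True. Hypothetical re-implementation of the check
// behind the node_ready.go lines above, not minikube's actual code.
func nodeReady(ctx context.Context, cs kubernetes.Interface, name string) (bool, error) {
	node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
	if err != nil {
		return false, err
	}
	for _, cond := range node.Status.Conditions {
		if cond.Type == corev1.NodeReady {
			return cond.Status == corev1.ConditionTrue, nil
		}
	}
	return false, nil
}

// waitNodeReady polls nodeReady until the node is Ready or the timeout
// expires; the ~2.5s tick is inferred from the log timestamps above.
func waitNodeReady(ctx context.Context, cs kubernetes.Interface, name string, timeout time.Duration) error {
	start := time.Now()
	for time.Since(start) < timeout {
		if ok, err := nodeReady(ctx, cs, name); err == nil && ok {
			fmt.Printf("duration metric: took %s for node %q to be \"Ready\"\n", time.Since(start), name)
			return nil
		}
		time.Sleep(2500 * time.Millisecond)
	}
	return fmt.Errorf("node %q never became Ready within %s", name, timeout)
}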
	I0916 10:36:42.623341 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:42.623417 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.624231 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.627174 1384589 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:42.859763 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.978703 1384589 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:42.978731 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
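(The interleaved kapi.go:96 lines come from four independent waiters, one per addon label selector — app.kubernetes.io/name=ingress-nginx, kubernetes.io/minikube-addons=registry, =gcp-auth, =csi-hostpath-driver — each re-listing pods on a roughly half-second tick; the "Found N Pods for label selector" lines mark the first time a selector matches anything now that the node is Ready. A minimal sketch of such a waiter, assuming the same package and imports as the sketch above; again an illustration, not minikube's kapi code:)

// waitForLabeledPods lists pods matching selector in ns on a fixed tick
// and returns once at least one pod matches and every matched pod has
// its Ready condition True. Illustrative sketch of the kapi.go:96 loop.
func waitForLabeledPods(ctx context.Context, cs kubernetes.Interface, ns, selector string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		pods, err := cs.CoreV1().Pods(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
		if err != nil {
			return err
		}
		allReady := len(pods.Items) > 0 // empty list keeps us waiting ("current state: Pending")
		for _, p := range pods.Items {
			if !isPodReady(&p) {
				fmt.Printf("waiting for pod %q, current state: %s\n", selector, p.Status.Phase)
				allReady = false
			}
		}
		if allReady {
			return nil
		}
		time.Sleep(500 * time.Millisecond) // the ~0.5s cadence visible in the timestamps above
	}
	return fmt.Errorf("timed out waiting for pods matching %q", selector)
}

// isPodReady checks a pod's Ready condition.
func isPodReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}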
	I0916 10:36:43.131865 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.133687 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.349019 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.479093 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:43.612085 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.613250 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.838378 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.975549 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.112002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.113078 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.303567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.474708 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.612644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.614103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.633869 1384589 pod_ready.go:93] pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.633943 1384589 pod_ready.go:82] duration metric: took 2.006728044s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.633994 1384589 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642581 1384589 pod_ready.go:93] pod "etcd-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.642653 1384589 pod_ready.go:82] duration metric: took 8.633064ms for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642683 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650836 1384589 pod_ready.go:93] pod "kube-apiserver-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.650858 1384589 pod_ready.go:82] duration metric: took 8.155202ms for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650871 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656888 1384589 pod_ready.go:93] pod "kube-controller-manager-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.656911 1384589 pod_ready.go:82] duration metric: took 6.032453ms for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656925 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663172 1384589 pod_ready.go:93] pod "kube-proxy-6zqlq" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.663198 1384589 pod_ready.go:82] duration metric: took 6.264685ms for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663210 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.800889 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.975665 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.036535 1384589 pod_ready.go:93] pod "kube-scheduler-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:45.036565 1384589 pod_ready.go:82] duration metric: took 373.347727ms for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:45.036579 1384589 pod_ready.go:79] waiting up to 6m0s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
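(From here pod_ready walks the named system-critical pods one by one — coredns, etcd, kube-apiserver, kube-controller-manager, kube-proxy, and kube-scheduler each turn Ready within seconds and get a "duration metric" — while metrics-server-84c5f94fbc-hngcs stays "Ready":"False" for the rest of this window, consistent with the TestAddons/parallel/MetricsServer failure in the summary. A per-pod wait of this shape might look like the following hedged sketch, same assumed package and imports; waitNamedPodReady and the 2s tick are inferred from the log cadence, not taken from minikube's source:)

// waitNamedPodReady waits for a single named pod (e.g.
// "metrics-server-84c5f94fbc-hngcs") in ns to report Ready=True,
// logging the elapsed time on success. Illustrative only.
func waitNamedPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
	start := time.Now()
	for time.Since(start) < timeout {
		p, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err == nil && isPodReady(p) {
			fmt.Printf("duration metric: took %s for pod %q in %q namespace to be \"Ready\"\n",
				time.Since(start), name, ns)
			return nil
		}
		time.Sleep(2 * time.Second) // pod_ready re-checks roughly every 2s in the log above
	}
	return fmt.Errorf("pod %q in %q namespace never became Ready within %s", name, ns, timeout)
}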
	I0916 10:36:45.111493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.112631 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.308107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.474657 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.611914 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.612461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.801892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.974950 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.111683 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.114082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.301157 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.475128 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.611945 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.613048 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.801341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.974921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.044703 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:47.112165 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.114489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.301333 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.480727 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.612823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.613992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.802256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.975336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.114295 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.116308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.301669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.478171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.613077 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.615032 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.802520 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.974753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.045627 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:49.112778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.116258 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.301317 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.477632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.617030 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.618841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.801756 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.975098 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.112372 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.115428 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.303239 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.475866 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.610712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.613666 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.800849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.975104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.113376 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.116309 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.305523 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.476644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.547164 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:51.619471 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.620588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.803271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.978508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.112860 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.114242 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.475635 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.610961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.611563 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.802388 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.975192 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.112514 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.113242 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.301036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.475517 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.613316 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.614402 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.801348 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.977291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.050970 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:54.110981 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.112076 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.300546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.476454 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.610582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.612518 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.803551 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.111398 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.112761 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.474274 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.609938 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.612002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.800575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.974519 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.112644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.113614 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.301290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.476637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.543349 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:56.613159 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.614779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.801547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.975878 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.111646 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.114449 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.301068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.475345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.612454 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.613637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.802031 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.975475 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.112792 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.114331 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.301185 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.477806 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.543702 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:58.611292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.612924 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.801770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.978258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.111614 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.113277 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.300874 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.478857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.612769 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.614234 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.801191 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.975770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.124776 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.127598 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.312397 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.476593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.612256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.615086 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.801400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.975455 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.045782 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:01.116772 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.117862 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.300859 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.475607 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.614426 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.616901 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.806694 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.976923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.111895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.112248 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.301293 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.474913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.610544 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.611469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.801570 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.974546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.110553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.111258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.302951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.475760 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.542976 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:03.612478 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.614314 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.802588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.974619 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.116170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.117565 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.301282 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.474423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.609959 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.611546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.802714 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.974564 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.111189 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.119380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.301308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.480667 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.545296 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:05.613921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.620210 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.801887 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.979380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.117389 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.120937 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.301555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.475271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.612080 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.801421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.975493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.111399 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.114107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.300779 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.478877 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.558060 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:07.615155 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.616925 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.801853 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.975171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.110594 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.111215 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.300440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.476290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.611297 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.612374 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.801416 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.975287 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.110125 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.111958 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.304146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.474050 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.610553 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.611805 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.801358 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.974606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.045151 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:10.115132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.117029 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.300604 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.478567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.612321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.613469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.801386 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.979174 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.112568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.116046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.301477 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.475805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.613534 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.615206 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.802410 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.976748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.047271 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:12.112753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.114779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.300849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.479609 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.633512 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.635102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.801945 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.978658 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.111553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.113586 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.303385 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.479039 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.615554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.806654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.981409 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.060889 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:14.112654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.113844 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.301688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.474872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.610310 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.610746 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.800633 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.975036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.112998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.115460 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.300634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.474102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.613955 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.615489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.801741 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.975686 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.113469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.114978 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.301581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.475151 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.550481 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:16.614516 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.615278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.802546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.975189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.110944 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.111649 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.302100 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.475101 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.611759 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.612357 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.800825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.975226 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.110760 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.112805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.300370 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.474527 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.610984 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.611944 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.801132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.974591 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.046356 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:19.112245 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.115197 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.301744 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.475515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.610679 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.614216 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.801704 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.974949 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.111388 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.114141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.301219 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.474669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.611319 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.615110 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.801384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.976136 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.113352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.113988 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.489778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.545440 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:21.613554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.616634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.801820 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.977146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.111094 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.112217 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.301825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.475834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.611602 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.612556 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.805363 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.975337 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.112472 1384589 kapi.go:107] duration metric: took 1m20.005670496s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:37:23.113515 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.300925 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.474515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.610822 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.801408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.977906 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.044059 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:24.117487 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.301384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.476565 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.611373 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.801872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.984901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.111954 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.300421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.475126 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.611267 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.808830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.975068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.111025 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.310954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.475111 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.543709 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:26.609974 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.838995 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.975321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.110779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.301198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.476321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.610748 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.801486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.975547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.110763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.301469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.474991 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.610943 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.801350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.975749 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.046127 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:29.110966 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.305494 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.475929 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.609824 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.801492 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.977852 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.113447 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.301994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.476258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.610718 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.801712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.975400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.110916 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.300717 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.474547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.542764 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:31.612339 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.804045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.975617 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.110961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.300588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.482569 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.611127 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.804201 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.975368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.111355 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.301816 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:33.477518 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.551472 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:33.611027 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.801158 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.013405 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.127200 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.310368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.475923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.611219 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.801913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.978855 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.118452 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.300764 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.476873 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.611849 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.802246 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.975118 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.044866 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:36.111125 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.301167 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.477188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.617190 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.801375 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.974623 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.113798 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.301345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.479115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.611187 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.802141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.976103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.094708 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:38.116394 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.300966 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.474752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.610164 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.800561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.975817 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.110879 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.301972 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.475982 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.614550 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.801870 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.975576 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.112781 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:40.301195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.476921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.543014 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:40.612230 1384589 kapi.go:107] duration metric: took 1m37.506412903s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:37:40.800501 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.980528 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.301899 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.478479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.801278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.975045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.302225 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.487350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.548067 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:42.806839 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.976392 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.300621 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.475884 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.802919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.975139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.301415 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.475371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.801688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.975259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.062166 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:45.301957 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.477003 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.802892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.301112 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.475372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.800784 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.974857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.303524 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.475768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.545443 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:47.800473 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.974841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.301353 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.474781 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.800728 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.975372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.301044 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.475307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.801251 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.976296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.044755 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:50.306461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.478119 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.802508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.975919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.310303 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.475230 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.801606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.975318 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.053332 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:52.302006 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:52.476486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.801441 1384589 kapi.go:107] duration metric: took 1m44.50438368s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:37:52.803585 1384589 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-936355 cluster.
	I0916 10:37:52.805126 1384589 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:37:52.807003 1384589 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:37:52.974797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.475581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.975250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.474446 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.542561 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:54.975714 1384589 kapi.go:107] duration metric: took 1m51.5059929s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:37:54.976913 1384589 out.go:177] * Enabled addons: nvidia-device-plugin, ingress-dns, cloud-spanner, storage-provisioner, metrics-server, yakd, default-storageclass, inspektor-gadget, volumesnapshots, registry, ingress, gcp-auth, csi-hostpath-driver
	I0916 10:37:54.977951 1384589 addons.go:510] duration metric: took 1m58.330681209s for enable addons: enabled=[nvidia-device-plugin ingress-dns cloud-spanner storage-provisioner metrics-server yakd default-storageclass inspektor-gadget volumesnapshots registry ingress gcp-auth csi-hostpath-driver]
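The kapi.go:96 lines above are the visible side of a simple poll loop: list pods by label selector roughly every 500ms (the timestamps show the cadence) until every match leaves Pending, then record the duration metric. A minimal sketch of such a loop, assuming client-go and a hypothetical kubeconfig path; minikube's actual implementation in kapi.go adds backoff and richer state handling:

    // wait_sketch.go - minimal label-selector pod wait, assuming client-go.
    // The selector and namespace below are taken from the log above; the
    // kubeconfig path is a placeholder.
    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func waitForLabel(cs *kubernetes.Clientset, ns, selector string, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for time.Now().Before(deadline) {
    		pods, err := cs.CoreV1().Pods(ns).List(context.TODO(),
    			metav1.ListOptions{LabelSelector: selector})
    		if err == nil && len(pods.Items) > 0 {
    			running := true
    			for _, p := range pods.Items {
    				if p.Status.Phase != corev1.PodRunning {
    					running = false // still Pending, as in the log above
    				}
    			}
    			if running {
    				return nil
    			}
    		}
    		time.Sleep(500 * time.Millisecond) // matches the ~500ms cadence in the log
    	}
    	return fmt.Errorf("timed out waiting for %q", selector)
    }

    func main() {
    	config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(config)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(waitForLabel(cs, "kube-system",
    		"kubernetes.io/minikube-addons=registry", 6*time.Minute))
    }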
	I0916 10:37:56.543286 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:58.543538 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:00.545466 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:03.044859 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:05.543384 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:08.044081 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:10.044862 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:12.543815 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:15.046388 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:17.044536 1384589 pod_ready.go:93] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.044563 1384589 pod_ready.go:82] duration metric: took 1m32.00797612s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.044576 1384589 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054621 1384589 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.054646 1384589 pod_ready.go:82] duration metric: took 10.061393ms for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054673 1384589 pod_ready.go:39] duration metric: took 1m34.442085136s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
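The interleaved pod_ready.go:103 lines track a stricter condition than the wait loop above: a pod can be Running while its readiness probes still report "Ready":"False", which is what the metrics-server entries show for roughly 90 seconds. A sketch of that condition check, assuming the same client-go imports as the previous sketch:

    // isPodReady reports whether the PodReady condition is True - the check
    // implied by the pod_ready.go lines above, distinct from Phase==Running.
    // (Uses corev1 "k8s.io/api/core/v1", as in the previous sketch.)
    func isPodReady(p *corev1.Pod) bool {
    	for _, c := range p.Status.Conditions {
    		if c.Type == corev1.PodReady {
    			return c.Status == corev1.ConditionTrue
    		}
    	}
    	return false
    }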
	I0916 10:38:17.054689 1384589 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:38:17.054724 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:17.054791 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:17.110909 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:17.110942 1384589 cri.go:89] found id: ""
	I0916 10:38:17.110950 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:17.111018 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.114542 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:17.114619 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:17.153834 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.153856 1384589 cri.go:89] found id: ""
	I0916 10:38:17.153864 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:17.153923 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.157470 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:17.157579 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:17.198133 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:17.198155 1384589 cri.go:89] found id: ""
	I0916 10:38:17.198163 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:17.198222 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.201699 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:17.201773 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:17.244177 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.244206 1384589 cri.go:89] found id: ""
	I0916 10:38:17.244215 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:17.244287 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.248238 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:17.248346 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:17.286359 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.286380 1384589 cri.go:89] found id: ""
	I0916 10:38:17.286388 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:17.286476 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.290475 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:17.290598 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:17.332786 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.332808 1384589 cri.go:89] found id: ""
	I0916 10:38:17.332817 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:17.332887 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.336545 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:17.336625 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:17.376900 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.376922 1384589 cri.go:89] found id: ""
	I0916 10:38:17.376930 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:17.376991 1384589 ssh_runner.go:195] Run: which crictl
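The cri.go/ssh_runner.go sequence above discovers one container ID per control-plane component by running "sudo crictl ps -a --quiet --name=<component>" on the node. A minimal local sketch of that discovery loop, assuming crictl is on PATH and sudo is available; minikube issues the same command over SSH via ssh_runner rather than locally:

    // discover_sketch.go - local approximation of the container-ID discovery
    // loop shown above. Component names are taken from the log.
    package main

    import (
    	"fmt"
    	"os/exec"
    	"strings"
    )

    func main() {
    	components := []string{"kube-apiserver", "etcd", "coredns",
    		"kube-scheduler", "kube-proxy", "kube-controller-manager", "kindnet"}
    	for _, name := range components {
    		out, err := exec.Command("sudo", "crictl", "ps", "-a",
    			"--quiet", "--name="+name).Output()
    		if err != nil {
    			fmt.Printf("%s: %v\n", name, err)
    			continue
    		}
    		ids := strings.Fields(string(out)) // --quiet prints one ID per line
    		fmt.Printf("%s: %d container(s): %v\n", name, len(ids), ids)
    	}
    }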
	I0916 10:38:17.380608 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:17.380639 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.430005 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:17.430059 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.478918 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:17.478953 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:17.578588 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:17.578626 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:17.596725 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:17.596755 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:17.780455 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:17.780482 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.832701 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:17.832737 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.873549 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:17.873579 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.944894 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:17.944933 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:18.006230 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:18.006286 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
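The "Found kubelet problem" entries that follow come from scanning the journalctl output just gathered; the forbidden-configmap warnings they flag are the kubelet being denied by the node authorizer before the pod-to-node relationship exists, which is typically transient during bring-up. A rough sketch of such a scan, assuming a plain substring match; minikube's logs.go:138 applies its own curated problem patterns:

    // problem_scan_sketch.go - naive scan of kubelet journal output for the
    // kinds of lines flagged below. Keywords here are illustrative, taken
    // from the log entries that follow.
    package main

    import (
    	"bufio"
    	"fmt"
    	"os/exec"
    	"strings"
    )

    func main() {
    	out, err := exec.Command("sudo", "journalctl", "-u", "kubelet", "-n", "400").Output()
    	if err != nil {
    		panic(err)
    	}
    	sc := bufio.NewScanner(strings.NewReader(string(out)))
    	for sc.Scan() {
    		line := sc.Text()
    		if strings.Contains(line, "Unhandled Error") ||
    			strings.Contains(line, "failed to list") {
    			fmt.Println("Found kubelet problem:", line)
    		}
    	}
    }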
	W0916 10:38:18.071787 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072057 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.072239 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072456 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.075800 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.076027 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.087591 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.087896 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.088088 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.088320 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.128812 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:18.128841 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:18.186612 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:18.186644 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:18.233148 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233182 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:18.233388 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:18.233404 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233412 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233423 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233429 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233449 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.233461 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233470 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:28.234697 1384589 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:38:28.249266 1384589 api_server.go:72] duration metric: took 2m31.602198408s to wait for apiserver process to appear ...
	I0916 10:38:28.249292 1384589 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:38:28.249329 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:28.249401 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:28.291513 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:28.291538 1384589 cri.go:89] found id: ""
	I0916 10:38:28.291546 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:28.291605 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.295282 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:28.295362 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:28.334381 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.334460 1384589 cri.go:89] found id: ""
	I0916 10:38:28.334479 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:28.334596 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.338232 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:28.338315 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:28.386465 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.386495 1384589 cri.go:89] found id: ""
	I0916 10:38:28.386503 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:28.386564 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.390431 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:28.390508 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:28.428479 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:28.428500 1384589 cri.go:89] found id: ""
	I0916 10:38:28.428508 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:28.428568 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.431936 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:28.432009 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:28.480074 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.480148 1384589 cri.go:89] found id: ""
	I0916 10:38:28.480171 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:28.480257 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.484845 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:28.484948 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:28.526872 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:28.526896 1384589 cri.go:89] found id: ""
	I0916 10:38:28.526905 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:28.526965 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.530520 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:28.530607 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:28.569037 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.569065 1384589 cri.go:89] found id: ""
	I0916 10:38:28.569074 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:28.569150 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.572604 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:28.572634 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:28.589298 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:28.589323 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:28.729585 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:28.729703 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.802248 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:28.802300 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.843099 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:28.843130 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.886320 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:28.886350 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.930299 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:28.930374 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:29.041608 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:29.041656 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:29.079590 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.079841 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.080020 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.080236 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.083646 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.083870 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095503 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.095743 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095931 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.096162 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.147372 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:29.147401 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:29.214117 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:29.214148 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:29.266528 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:29.266562 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:29.339157 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:29.339193 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:29.402328 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402360 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:29.402421 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:29.402433 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402445 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402453 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402464 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402472 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.402483 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402490 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:39.403739 1384589 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:38:39.411467 1384589 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:38:39.412460 1384589 api_server.go:141] control plane version: v1.31.1
	I0916 10:38:39.412486 1384589 api_server.go:131] duration metric: took 11.16318566s to wait for apiserver health ...
	I0916 10:38:39.412495 1384589 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:38:39.412517 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:39.412584 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:39.451224 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:39.451243 1384589 cri.go:89] found id: ""
	I0916 10:38:39.451251 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:39.451311 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.454893 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:39.454968 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:39.499416 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.499439 1384589 cri.go:89] found id: ""
	I0916 10:38:39.499448 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:39.499510 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.503122 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:39.503208 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:39.542014 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:39.542035 1384589 cri.go:89] found id: ""
	I0916 10:38:39.542043 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:39.542101 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.546062 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:39.546152 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:39.587808 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:39.587831 1384589 cri.go:89] found id: ""
	I0916 10:38:39.587842 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:39.587908 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.591371 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:39.591441 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:39.629404 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:39.629428 1384589 cri.go:89] found id: ""
	I0916 10:38:39.629437 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:39.629495 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.633014 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:39.633091 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:39.676945 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:39.676965 1384589 cri.go:89] found id: ""
	I0916 10:38:39.676973 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:39.677033 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.680612 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:39.680742 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:39.722262 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:39.722282 1384589 cri.go:89] found id: ""
	I0916 10:38:39.722291 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:39.722347 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.726091 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:39.726167 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:39.742632 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:39.742660 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.814109 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:39.814142 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:39.914270 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:39.914308 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:40.019354 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:40.019397 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:40.079304 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:40.079345 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:40.123482 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.123736 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.123917 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.124171 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.127515 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.127756 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139306 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139536 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139726 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139953 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.192100 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:40.192138 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:40.333078 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:40.333117 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:40.403526 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:40.403566 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:40.442653 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:40.442681 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:40.492601 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:40.492632 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:40.533326 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:40.533357 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:40.587619 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587653 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:40.587735 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:40.587753 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587783 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587793 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587808 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587820 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.587827 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587838 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:50.602620 1384589 system_pods.go:59] 18 kube-system pods found
	I0916 10:38:50.602695 1384589 system_pods.go:61] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.602715 1384589 system_pods.go:61] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.602720 1384589 system_pods.go:61] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.602728 1384589 system_pods.go:61] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.602736 1384589 system_pods.go:61] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.602745 1384589 system_pods.go:61] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.602749 1384589 system_pods.go:61] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.602753 1384589 system_pods.go:61] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.602762 1384589 system_pods.go:61] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.602767 1384589 system_pods.go:61] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.602771 1384589 system_pods.go:61] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.602775 1384589 system_pods.go:61] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.602794 1384589 system_pods.go:61] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.602798 1384589 system_pods.go:61] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.602813 1384589 system_pods.go:61] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.602821 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.602825 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.602832 1384589 system_pods.go:61] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.602848 1384589 system_pods.go:74] duration metric: took 11.190345697s to wait for pod list to return data ...
	I0916 10:38:50.602873 1384589 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:38:50.606360 1384589 default_sa.go:45] found service account: "default"
	I0916 10:38:50.606391 1384589 default_sa.go:55] duration metric: took 3.50956ms for default service account to be created ...
	I0916 10:38:50.606400 1384589 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:38:50.616619 1384589 system_pods.go:86] 18 kube-system pods found
	I0916 10:38:50.616661 1384589 system_pods.go:89] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.616668 1384589 system_pods.go:89] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.617624 1384589 system_pods.go:89] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.617646 1384589 system_pods.go:89] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.617652 1384589 system_pods.go:89] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.617662 1384589 system_pods.go:89] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.617668 1384589 system_pods.go:89] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.617673 1384589 system_pods.go:89] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.617677 1384589 system_pods.go:89] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.617682 1384589 system_pods.go:89] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.617686 1384589 system_pods.go:89] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.617691 1384589 system_pods.go:89] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.617696 1384589 system_pods.go:89] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.617701 1384589 system_pods.go:89] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.617705 1384589 system_pods.go:89] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.617716 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.617730 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.617734 1384589 system_pods.go:89] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.617742 1384589 system_pods.go:126] duration metric: took 11.335042ms to wait for k8s-apps to be running ...
	I0916 10:38:50.617754 1384589 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:38:50.617812 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:38:50.630041 1384589 system_svc.go:56] duration metric: took 12.276523ms WaitForService to wait for kubelet
	I0916 10:38:50.630069 1384589 kubeadm.go:582] duration metric: took 2m53.983006463s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:38:50.630088 1384589 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:38:50.633754 1384589 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:38:50.633790 1384589 node_conditions.go:123] node cpu capacity is 2
	I0916 10:38:50.633806 1384589 node_conditions.go:105] duration metric: took 3.708685ms to run NodePressure ...
	I0916 10:38:50.633819 1384589 start.go:241] waiting for startup goroutines ...
	I0916 10:38:50.633826 1384589 start.go:246] waiting for cluster config update ...
	I0916 10:38:50.633842 1384589 start.go:255] writing updated cluster config ...
	I0916 10:38:50.634158 1384589 ssh_runner.go:195] Run: rm -f paused
	I0916 10:38:50.643301 1384589 out.go:177] * Done! kubectl is now configured to use "addons-936355" cluster and "default" namespace by default
	E0916 10:38:50.646536 1384589 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
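
The "exec format error" above (and in the kubectl-driven test failures earlier in this report) is the classic symptom of invoking a binary built for a different CPU architecture, here most likely an amd64 kubectl on this arm64 runner. As a quick cross-check, a minimal Go sketch (illustrative only, not part of the test tooling; it assumes kubectl sits at /usr/local/bin/kubectl and is an ELF binary) that compares the binary's target machine with the host architecture:

    package main

    import (
        "debug/elf"
        "fmt"
        "os"
        "runtime"
    )

    func main() {
        // Read the ELF header of the kubectl binary.
        f, err := elf.Open("/usr/local/bin/kubectl")
        if err != nil {
            fmt.Fprintln(os.Stderr, "open:", err)
            os.Exit(1)
        }
        defer f.Close()
        // f.Machine is the CPU the binary targets (e.g. EM_X86_64 or
        // EM_AARCH64); runtime.GOARCH is what this host actually runs.
        fmt.Printf("kubectl built for %v, host is %s\n", f.Machine, runtime.GOARCH)
    }

On the node itself, running "file /usr/local/bin/kubectl" answers the same question in one line.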
	
	
	==> CRI-O <==
	Sep 16 10:38:52 addons-936355 crio[961]: time="2024-09-16 10:38:52.067771222Z" level=info msg="Removed pod sandbox: 821b6da716f9c307f2f1ca9123b5a25e280c77873ba32652c05c183acc0093a0" id=0993c911-2834-4482-a87e-85e258fbca72 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:38:52 addons-936355 crio[961]: time="2024-09-16 10:38:52.070317985Z" level=info msg="Stopping pod sandbox: 1e16aeed21b4a80718694b87f672c460bdbeb0351bf54081a50305624b4e0140" id=df5e8fe5-594c-42a6-8d54-056aae5fe16d name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:38:52 addons-936355 crio[961]: time="2024-09-16 10:38:52.070588780Z" level=info msg="Stopped pod sandbox (already stopped): 1e16aeed21b4a80718694b87f672c460bdbeb0351bf54081a50305624b4e0140" id=df5e8fe5-594c-42a6-8d54-056aae5fe16d name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:38:52 addons-936355 crio[961]: time="2024-09-16 10:38:52.071259933Z" level=info msg="Removing pod sandbox: 1e16aeed21b4a80718694b87f672c460bdbeb0351bf54081a50305624b4e0140" id=9dd72c91-a062-4e03-aee7-b65ffe9d9747 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:38:52 addons-936355 crio[961]: time="2024-09-16 10:38:52.093361927Z" level=info msg="Removed pod sandbox: 1e16aeed21b4a80718694b87f672c460bdbeb0351bf54081a50305624b4e0140" id=9dd72c91-a062-4e03-aee7-b65ffe9d9747 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.172377621Z" level=info msg="Stopping container: 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07 (timeout: 30s)" id=9ef1b1d1-e811-43d6-9c5b-c3b084483629 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.205763771Z" level=info msg="Stopping container: 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3 (timeout: 30s)" id=83291a27-9331-45da-aa87-f3e2388b6b63 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:39:05 addons-936355 conmon[2991]: conmon 87f384d0874aec7b02ee <ninfo>: container 3003 exited with status 2
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.320204501Z" level=info msg="Stopped container 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07: kube-system/registry-66c9cd494c-xh5d4/registry" id=9ef1b1d1-e811-43d6-9c5b-c3b084483629 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.321024787Z" level=info msg="Stopping pod sandbox: c003f384d1cdb0050d93ec9015722ad80b5a0aa00d90a1916d345ff7306e2e99" id=4241eeb6-0181-4c51-a41d-a2789006d971 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.321523195Z" level=info msg="Got pod network &{Name:registry-66c9cd494c-xh5d4 Namespace:kube-system ID:c003f384d1cdb0050d93ec9015722ad80b5a0aa00d90a1916d345ff7306e2e99 UID:6f439a0d-4e84-4ea2-97ef-2666b73327b7 NetNS:/var/run/netns/0d332e66-6175-4ee2-8802-4e52f3b9fb0c Networks:[{Name:kindnet Ifname:eth0}] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.321765404Z" level=info msg="Deleting pod kube-system_registry-66c9cd494c-xh5d4 from CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.359651455Z" level=info msg="Stopped pod sandbox: c003f384d1cdb0050d93ec9015722ad80b5a0aa00d90a1916d345ff7306e2e99" id=4241eeb6-0181-4c51-a41d-a2789006d971 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.364162227Z" level=info msg="Stopped container 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3: kube-system/registry-proxy-xdksj/registry-proxy" id=83291a27-9331-45da-aa87-f3e2388b6b63 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.364897133Z" level=info msg="Stopping pod sandbox: 9a0035f03f71cd166a9aca62a4f6f4d7d168d152f02e0057e8301e5add9459b2" id=51abf0be-b55f-4a3a-8f2a-abe49f1b13ca name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.382999004Z" level=info msg="Restoring iptables rules: *nat\n:KUBE-HP-FGJBLTLSE7OJCA2Q - [0:0]\n:KUBE-HP-OPTXOFSG7LL2KRSE - [0:0]\n:KUBE-HP-QXS6FWWTXJ4KZKE7 - [0:0]\n:KUBE-HOSTPORTS - [0:0]\n-A KUBE-HOSTPORTS -p tcp -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-8595-abd9ef1ef766_0_ hostport 443\" -m tcp --dport 443 -j KUBE-HP-FGJBLTLSE7OJCA2Q\n-A KUBE-HOSTPORTS -p tcp -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-8595-abd9ef1ef766_0_ hostport 80\" -m tcp --dport 80 -j KUBE-HP-OPTXOFSG7LL2KRSE\n-A KUBE-HP-FGJBLTLSE7OJCA2Q -s 10.244.0.19/32 -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-8595-abd9ef1ef766_0_ hostport 443\" -j KUBE-MARK-MASQ\n-A KUBE-HP-FGJBLTLSE7OJCA2Q -p tcp -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-85
95-abd9ef1ef766_0_ hostport 443\" -m tcp -j DNAT --to-destination 10.244.0.19:443\n-A KUBE-HP-OPTXOFSG7LL2KRSE -s 10.244.0.19/32 -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-8595-abd9ef1ef766_0_ hostport 80\" -j KUBE-MARK-MASQ\n-A KUBE-HP-OPTXOFSG7LL2KRSE -p tcp -m comment --comment \"k8s_ingress-nginx-controller-bc57996ff-jgfjf_ingress-nginx_88782742-872e-4ae3-8595-abd9ef1ef766_0_ hostport 80\" -m tcp -j DNAT --to-destination 10.244.0.19:80\n-X KUBE-HP-QXS6FWWTXJ4KZKE7\nCOMMIT\n"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.392505837Z" level=info msg="Closing host port tcp:5000"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.394734380Z" level=info msg="Host port tcp:5000 does not have an open socket"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.394950120Z" level=info msg="Got pod network &{Name:registry-proxy-xdksj Namespace:kube-system ID:9a0035f03f71cd166a9aca62a4f6f4d7d168d152f02e0057e8301e5add9459b2 UID:f3007abe-d474-44b8-91de-56f1d2dc83a9 NetNS:/var/run/netns/bf65e95b-6d06-4086-9c10-308209abbf2c Networks:[{Name:kindnet Ifname:eth0}] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.395085715Z" level=info msg="Deleting pod kube-system_registry-proxy-xdksj from CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.431560456Z" level=info msg="Stopped pod sandbox: 9a0035f03f71cd166a9aca62a4f6f4d7d168d152f02e0057e8301e5add9459b2" id=51abf0be-b55f-4a3a-8f2a-abe49f1b13ca name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.738572389Z" level=info msg="Removing container: 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3" id=d57b58af-e27a-4d8c-8b01-97ced07dd2a6 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.767450630Z" level=info msg="Removed container 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3: kube-system/registry-proxy-xdksj/registry-proxy" id=d57b58af-e27a-4d8c-8b01-97ced07dd2a6 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.769927963Z" level=info msg="Removing container: 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07" id=cdc7409a-4ae3-42f4-92f0-f482a5bad7ae name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:39:05 addons-936355 crio[961]: time="2024-09-16 10:39:05.815945680Z" level=info msg="Removed container 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07: kube-system/registry-66c9cd494c-xh5d4/registry" id=cdc7409a-4ae3-42f4-92f0-f482a5bad7ae name=/runtime.v1.RuntimeService/RemoveContainer
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                                                        CREATED              STATE               NAME                                     ATTEMPT             POD ID              POD
	b3fda3bbc6527       ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec                            46 seconds ago       Exited              gadget                                   4                   cf56dfeabe5de       gadget-hx2qq
	3b30e9b80217f       registry.k8s.io/sig-storage/csi-snapshotter@sha256:291334908ddf71a4661fd7f6d9d97274de8a5378a2b6fdfeb2ce73414a34f82f                          About a minute ago   Running             csi-snapshotter                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	66246ecfc47d6       gcr.io/k8s-minikube/gcp-auth-webhook@sha256:a40e1a121ee367d1712ac3a54ec9c38c405a65dde923c98e5fa6368fa82c4b69                                 About a minute ago   Running             gcp-auth                                 0                   b09347ee3cb04       gcp-auth-89d5ffd79-j2ckg
	5dabae8faaade       registry.k8s.io/sig-storage/csi-provisioner@sha256:98ffd09c0784203d200e0f8c241501de31c8df79644caac7eed61bd6391e5d49                          About a minute ago   Running             csi-provisioner                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	63d680209bdeb       registry.k8s.io/sig-storage/livenessprobe@sha256:8b00c6e8f52639ed9c6f866085893ab688e57879741b3089e3cfa9998502e158                            About a minute ago   Running             liveness-probe                           0                   32259548d9366       csi-hostpathplugin-zrlmd
	b241211876358       registry.k8s.io/sig-storage/hostpathplugin@sha256:7b1dfc90a367222067fc468442fdf952e20fc5961f25c1ad654300ddc34d7083                           About a minute ago   Running             hostpath                                 0                   32259548d9366       csi-hostpathplugin-zrlmd
	ab8eaedf8040a       registry.k8s.io/sig-storage/csi-node-driver-registrar@sha256:511b8c8ac828194a753909d26555ff08bc12f497dd8daeb83fe9d593693a26c1                About a minute ago   Running             node-driver-registrar                    0                   32259548d9366       csi-hostpathplugin-zrlmd
	331ea01abf2ed       registry.k8s.io/ingress-nginx/controller@sha256:22f9d129ae8c89a2cabbd13af3c1668944f3dd68fec186199b7024a0a2fc75b3                             About a minute ago   Running             controller                               0                   549ac22ef6389       ingress-nginx-controller-bc57996ff-jgfjf
	5e5f91a726842       docker.io/rancher/local-path-provisioner@sha256:689a2489a24e74426e4a4666e611c988202c5fa995908b0c60133aca3eb87d98                             About a minute ago   Running             local-path-provisioner                   0                   0d353b19ef8b9       local-path-provisioner-86d989889c-b652d
	9773c25a0a3dc       gcr.io/cloud-spanner-emulator/emulator@sha256:41ec188288c7943f488600462b2b74002814e52439be82d15de33c3ee4898a58                               About a minute ago   Running             cloud-spanner-emulator                   0                   32e89c2c5a56d       cloud-spanner-emulator-769b77f747-qvhhc
	3d28641a10686       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   About a minute ago   Exited              patch                                    0                   ae4e1f0886d62       ingress-nginx-admission-patch-5hvnf
	98ee5c554b6be       registry.k8s.io/sig-storage/csi-external-health-monitor-controller@sha256:80b9ba94aa2afe24553d69bd165a6a51552d1582d68618ec00d3b804a7d9193c   About a minute ago   Running             csi-external-health-monitor-controller   0                   32259548d9366       csi-hostpathplugin-zrlmd
	11f6f0bf554a7       registry.k8s.io/sig-storage/csi-resizer@sha256:425d8f1b769398127767b06ed97ce62578a3179bcb99809ce93a1649e025ffe7                              About a minute ago   Running             csi-resizer                              0                   b35d742443216       csi-hostpath-resizer-0
	b9e189d1acd4c       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   About a minute ago   Exited              create                                   0                   ef2a1639e8386       ingress-nginx-admission-create-kmjkm
	b65d0d4cafeec       registry.k8s.io/metrics-server/metrics-server@sha256:048bcf48fc2cce517a61777e22bac782ba59ea5e9b9a54bcb42dbee99566a91f                        About a minute ago   Running             metrics-server                           0                   d015a3419dfc0       metrics-server-84c5f94fbc-hngcs
	fe7a31fb7fe71       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      About a minute ago   Running             volume-snapshot-controller               0                   54dbfb69eabc4       snapshot-controller-56fcc65765-5th26
	082cee4b81438       docker.io/marcnuri/yakd@sha256:1c961556224d57fc747de0b1874524208e5fb4f8386f23e9c1c4c18e97109f17                                              About a minute ago   Running             yakd                                     0                   91ccf72a05daa       yakd-dashboard-67d98fc6b-ztsj8
	4cf01aeaccd3f       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      2 minutes ago        Running             volume-snapshot-controller               0                   29551751a8a3b       snapshot-controller-56fcc65765-fjrw9
	d5f8b279203cd       nvcr.io/nvidia/k8s-device-plugin@sha256:cdd05f9d89f0552478d46474005e86b98795ad364664f644225b99d94978e680                                     2 minutes ago        Running             nvidia-device-plugin-ctr                 0                   eda9663f4feb4       nvidia-device-plugin-daemonset-6j9gc
	d50b4977768d7       registry.k8s.io/sig-storage/csi-attacher@sha256:4b5609c78455de45821910065281a368d5f760b41250f90cbde5110543bdc326                             2 minutes ago        Running             csi-attacher                             0                   6f989f68a9599       csi-hostpath-attacher-0
	198a1da1f3633       gcr.io/k8s-minikube/minikube-ingress-dns@sha256:4211a1de532376c881851542238121b26792225faa36a7b02dccad88fd05797c                             2 minutes ago        Running             minikube-ingress-dns                     0                   69f4c5e690a85       kube-ingress-dns-minikube
	ee934dc9f4f92       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                                             2 minutes ago        Running             coredns                                  0                   d4b44085e648e       coredns-7c65d6cfc9-r6x6b
	2a862ef326432       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                                             2 minutes ago        Running             storage-provisioner                      0                   e168c388c9d11       storage-provisioner
	8d59e894feca0       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                                             3 minutes ago        Running             kindnet-cni                              0                   ca9fcc6465180       kindnet-wv5d6
	6200eb5cfcd24       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                                             3 minutes ago        Running             kube-proxy                               0                   a491da0967548       kube-proxy-6zqlq
	2b161087caf5a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                                             3 minutes ago        Running             kube-scheduler                           0                   c99e3a64f4ade       kube-scheduler-addons-936355
	4ee66eef50ab6       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                                             3 minutes ago        Running             kube-controller-manager                  0                   70ee024a23a5b       kube-controller-manager-addons-936355
	f911db1ed55bb       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                                                             3 minutes ago        Running             kube-apiserver                           0                   fe5dcd273af65       kube-apiserver-addons-936355
	3b247261f15f4       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                                             3 minutes ago        Running             etcd                                     0                   24ef782ab6be4       etcd-addons-936355
	
	
	==> coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] <==
	[INFO] 10.244.0.6:41410 - 64521 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000074082s
	[INFO] 10.244.0.6:32998 - 54705 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002300443s
	[INFO] 10.244.0.6:32998 - 29583 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002118081s
	[INFO] 10.244.0.6:57466 - 59415 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000171687s
	[INFO] 10.244.0.6:57466 - 26377 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000100124s
	[INFO] 10.244.0.6:57769 - 49607 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000092608s
	[INFO] 10.244.0.6:57769 - 14275 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.00007117s
	[INFO] 10.244.0.6:44055 - 7650 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000056876s
	[INFO] 10.244.0.6:44055 - 57820 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000036053s
	[INFO] 10.244.0.6:42734 - 36918 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000045283s
	[INFO] 10.244.0.6:42734 - 61736 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000033599s
	[INFO] 10.244.0.6:54338 - 26081 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001812112s
	[INFO] 10.244.0.6:54338 - 40423 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001769553s
	[INFO] 10.244.0.6:39094 - 56002 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000048811s
	[INFO] 10.244.0.6:39094 - 9935 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000036265s
	[INFO] 10.244.0.20:53754 - 1366 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.0016936s
	[INFO] 10.244.0.20:51144 - 45189 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.002085581s
	[INFO] 10.244.0.20:60186 - 495 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000131541s
	[INFO] 10.244.0.20:58173 - 47948 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000146031s
	[INFO] 10.244.0.20:41557 - 45319 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000125396s
	[INFO] 10.244.0.20:60168 - 27262 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00012213s
	[INFO] 10.244.0.20:55951 - 7020 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.004693068s
	[INFO] 10.244.0.20:46529 - 17954 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.005735124s
	[INFO] 10.244.0.20:54136 - 20848 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001853292s
	[INFO] 10.244.0.20:59146 - 51848 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.0025582s
	
	
	==> describe nodes <==
	Name:               addons-936355
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-936355
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-936355
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-936355
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-936355"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:35:49 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-936355
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:38:56 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:38:25 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:38:25 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:38:25 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:38:25 +0000   Mon, 16 Sep 2024 10:36:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-936355
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d04f59375248444681829ec487634926
	  System UUID:                65d15a11-4f3c-4207-941c-6a3b096d7c27
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (22 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-qvhhc     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m6s
	  gadget                      gadget-hx2qq                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m4s
	  gcp-auth                    gcp-auth-89d5ffd79-j2ckg                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m58s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-jgfjf    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         3m4s
	  kube-system                 coredns-7c65d6cfc9-r6x6b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m8s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m3s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m3s
	  kube-system                 csi-hostpathplugin-zrlmd                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m24s
	  kube-system                 etcd-addons-936355                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m14s
	  kube-system                 kindnet-wv5d6                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m8s
	  kube-system                 kube-apiserver-addons-936355                250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m15s
	  kube-system                 kube-controller-manager-addons-936355       200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m14s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m5s
	  kube-system                 kube-proxy-6zqlq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m9s
	  kube-system                 kube-scheduler-addons-936355                100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m14s
	  kube-system                 metrics-server-84c5f94fbc-hngcs             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         3m5s
	  kube-system                 nvidia-device-plugin-daemonset-6j9gc        0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m24s
	  kube-system                 snapshot-controller-56fcc65765-5th26        0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m3s
	  kube-system                 snapshot-controller-56fcc65765-fjrw9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m3s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m4s
	  local-path-storage          local-path-provisioner-86d989889c-b652d     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m4s
	  yakd-dashboard              yakd-dashboard-67d98fc6b-ztsj8              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     3m4s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             638Mi (8%)   476Mi (6%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m4s                   kube-proxy       
	  Normal   NodeHasSufficientMemory  3m21s (x8 over 3m22s)  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m21s (x8 over 3m22s)  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m21s (x7 over 3m22s)  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m15s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m15s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m14s                  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m14s                  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m14s                  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           3m10s                  node-controller  Node addons-936355 event: Registered Node addons-936355 in Controller
	  Normal   NodeReady                2m24s                  kubelet          Node addons-936355 status is now: NodeReady
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] <==
	{"level":"warn","ts":"2024-09-16T10:35:59.461521Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"173.42953ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/serviceaccounts\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:35:59.461548Z","caller":"traceutil/trace.go:171","msg":"trace[825824076] range","detail":"{range_begin:/registry/serviceaccounts; range_end:; response_count:0; response_revision:365; }","duration":"173.471351ms","start":"2024-09-16T10:35:59.288071Z","end":"2024-09-16T10:35:59.461542Z","steps":["trace[825824076] 'agreement among raft nodes before linearized reading'  (duration: 173.394077ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910299Z","caller":"traceutil/trace.go:171","msg":"trace[571848] transaction","detail":"{read_only:false; response_revision:372; number_of_response:1; }","duration":"101.485416ms","start":"2024-09-16T10:35:59.808786Z","end":"2024-09-16T10:35:59.910272Z","steps":["trace[571848] 'process raft request'  (duration: 72.962753ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910551Z","caller":"traceutil/trace.go:171","msg":"trace[2049811000] transaction","detail":"{read_only:false; response_revision:373; number_of_response:1; }","duration":"101.622964ms","start":"2024-09-16T10:35:59.808918Z","end":"2024-09-16T10:35:59.910541Z","steps":["trace[2049811000] 'process raft request'  (duration: 72.910972ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910806Z","caller":"traceutil/trace.go:171","msg":"trace[1901548869] transaction","detail":"{read_only:false; response_revision:374; number_of_response:1; }","duration":"101.844209ms","start":"2024-09-16T10:35:59.808954Z","end":"2024-09-16T10:35:59.910798Z","steps":["trace[1901548869] 'process raft request'  (duration: 72.897089ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945205Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.325816ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:35:59.945344Z","caller":"traceutil/trace.go:171","msg":"trace[1851060564] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:375; }","duration":"136.4823ms","start":"2024-09-16T10:35:59.808847Z","end":"2024-09-16T10:35:59.945330Z","steps":["trace[1851060564] 'agreement among raft nodes before linearized reading'  (duration: 136.289206ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945577Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.842881ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods/kube-system/kube-proxy-6zqlq\" ","response":"range_response_count:1 size:4833"}
	{"level":"info","ts":"2024-09-16T10:35:59.945682Z","caller":"traceutil/trace.go:171","msg":"trace[840636989] range","detail":"{range_begin:/registry/pods/kube-system/kube-proxy-6zqlq; range_end:; response_count:1; response_revision:375; }","duration":"136.945081ms","start":"2024-09-16T10:35:59.808725Z","end":"2024-09-16T10:35:59.945670Z","steps":["trace[840636989] 'agreement among raft nodes before linearized reading'  (duration: 136.808125ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.052890Z","caller":"traceutil/trace.go:171","msg":"trace[935433541] transaction","detail":"{read_only:false; response_revision:376; number_of_response:1; }","duration":"171.425064ms","start":"2024-09-16T10:35:59.881432Z","end":"2024-09-16T10:36:00.052857Z","steps":["trace[935433541] 'process raft request'  (duration: 103.374661ms)","trace[935433541] 'compare'  (duration: 67.370586ms)"],"step_count":2}
	{"level":"info","ts":"2024-09-16T10:36:00.053251Z","caller":"traceutil/trace.go:171","msg":"trace[1640692462] linearizableReadLoop","detail":"{readStateIndex:386; appliedIndex:385; }","duration":"171.181083ms","start":"2024-09-16T10:35:59.882059Z","end":"2024-09-16T10:36:00.053240Z","steps":["trace[1640692462] 'read index received'  (duration: 86.984477ms)","trace[1640692462] 'applied index is now lower than readState.Index'  (duration: 84.173345ms)"],"step_count":2}
	{"level":"warn","ts":"2024-09-16T10:36:00.082458Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"201.082081ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/specs/default/cloud-spanner-emulator\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.131166Z","caller":"traceutil/trace.go:171","msg":"trace[1850329919] range","detail":"{range_begin:/registry/services/specs/default/cloud-spanner-emulator; range_end:; response_count:0; response_revision:379; }","duration":"249.789246ms","start":"2024-09-16T10:35:59.881352Z","end":"2024-09-16T10:36:00.131141Z","steps":["trace[1850329919] 'agreement among raft nodes before linearized reading'  (duration: 201.06412ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.081165Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"179.917297ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/apiextensions.k8s.io/customresourcedefinitions\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.139019Z","caller":"traceutil/trace.go:171","msg":"trace[179530847] range","detail":"{range_begin:/registry/apiextensions.k8s.io/customresourcedefinitions; range_end:; response_count:0; response_revision:377; }","duration":"257.611311ms","start":"2024-09-16T10:35:59.881381Z","end":"2024-09-16T10:36:00.138992Z","steps":["trace[179530847] 'agreement among raft nodes before linearized reading'  (duration: 179.875904ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.497778Z","caller":"traceutil/trace.go:171","msg":"trace[781374587] transaction","detail":"{read_only:false; response_revision:383; number_of_response:1; }","duration":"244.927422ms","start":"2024-09-16T10:36:00.252822Z","end":"2024-09-16T10:36:00.497749Z","steps":["trace[781374587] 'process raft request'  (duration: 240.255139ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498033Z","caller":"traceutil/trace.go:171","msg":"trace[2049862755] transaction","detail":"{read_only:false; response_revision:384; number_of_response:1; }","duration":"245.149988ms","start":"2024-09-16T10:36:00.252873Z","end":"2024-09-16T10:36:00.498023Z","steps":["trace[2049862755] 'process raft request'  (duration: 243.936212ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498257Z","caller":"traceutil/trace.go:171","msg":"trace[1308392049] transaction","detail":"{read_only:false; response_revision:385; number_of_response:1; }","duration":"245.371382ms","start":"2024-09-16T10:36:00.252875Z","end":"2024-09-16T10:36:00.498247Z","steps":["trace[1308392049] 'process raft request'  (duration: 243.967662ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498461Z","caller":"traceutil/trace.go:171","msg":"trace[1919696831] transaction","detail":"{read_only:false; response_revision:386; number_of_response:1; }","duration":"245.376936ms","start":"2024-09-16T10:36:00.253076Z","end":"2024-09-16T10:36:00.498453Z","steps":["trace[1919696831] 'process raft request'  (duration: 243.813828ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.508772Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"105.013609ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/namespaces/kube-system\" ","response":"range_response_count:1 size:351"}
	{"level":"info","ts":"2024-09-16T10:36:00.508863Z","caller":"traceutil/trace.go:171","msg":"trace[1394685121] range","detail":"{range_begin:/registry/namespaces/kube-system; range_end:; response_count:1; response_revision:394; }","duration":"105.116096ms","start":"2024-09-16T10:36:00.403731Z","end":"2024-09-16T10:36:00.508847Z","steps":["trace[1394685121] 'agreement among raft nodes before linearized reading'  (duration: 104.925356ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.510822Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"106.978552ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/ranges/serviceips\" ","response":"range_response_count:1 size:116"}
	{"level":"info","ts":"2024-09-16T10:36:00.510873Z","caller":"traceutil/trace.go:171","msg":"trace[90254389] range","detail":"{range_begin:/registry/ranges/serviceips; range_end:; response_count:1; response_revision:394; }","duration":"107.038374ms","start":"2024-09-16T10:36:00.403822Z","end":"2024-09-16T10:36:00.510860Z","steps":["trace[90254389] 'agreement among raft nodes before linearized reading'  (duration: 106.927616ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.513542Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"109.80734ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:36:00.513613Z","caller":"traceutil/trace.go:171","msg":"trace[1039894144] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:395; }","duration":"109.886707ms","start":"2024-09-16T10:36:00.403712Z","end":"2024-09-16T10:36:00.513599Z","steps":["trace[1039894144] 'agreement among raft nodes before linearized reading'  (duration: 109.778304ms)"],"step_count":1}
	
	
	==> gcp-auth [66246ecfc47d65d522c45cff2baf15e2433dc0e0681c400a1437f7890b27b5b4] <==
	2024/09/16 10:37:52 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:39:06 up 10:21,  0 users,  load average: 0.68, 1.65, 2.16
	Linux addons-936355 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] <==
	I0916 10:37:02.017517       1 main.go:299] handling current node
	I0916 10:37:12.017862       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:12.018018       1 main.go:299] handling current node
	I0916 10:37:22.020833       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:22.020865       1 main.go:299] handling current node
	I0916 10:37:32.017358       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:32.017445       1 main.go:299] handling current node
	I0916 10:37:42.018563       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:42.018620       1 main.go:299] handling current node
	I0916 10:37:52.020803       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:52.020837       1 main.go:299] handling current node
	I0916 10:38:02.017541       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:02.017588       1 main.go:299] handling current node
	I0916 10:38:12.017242       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:12.017281       1 main.go:299] handling current node
	I0916 10:38:22.020756       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:22.020801       1 main.go:299] handling current node
	I0916 10:38:32.018099       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:32.018160       1 main.go:299] handling current node
	I0916 10:38:42.024451       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:42.024497       1 main.go:299] handling current node
	I0916 10:38:52.025239       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:52.025276       1 main.go:299] handling current node
	I0916 10:39:02.017471       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:39:02.017635       1 main.go:299] handling current node
	
	
	==> kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] <==
	I0916 10:37:03.362794       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:37:03.362849       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	E0916 10:38:17.001292       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: connect: connection refused" logger="UnhandledError"
	W0916 10:38:17.001510       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:17.001593       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	W0916 10:38:18.005200       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005260       1 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1beta1.metrics.k8s.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError"
	W0916 10:38:18.005310       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005375       1 controller.go:102] "Unhandled Error" err=<
		loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:18.006688       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:38:18.006752       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	W0916 10:38:22.012823       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:22.012831       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: i/o timeout" logger="UnhandledError"
	E0916 10:38:22.012998       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:22.050506       1 handler.go:286] Adding GroupVersion metrics.k8s.io v1beta1 to ResourceManager
	E0916 10:38:22.062342       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: Operation cannot be fulfilled on apiservices.apiregistration.k8s.io \"v1beta1.metrics.k8s.io\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	
	
	==> kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] <==
	I0916 10:37:35.666008       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/snapshot-controller-56fcc65765" duration="6.577785ms"
	I0916 10:37:35.666189       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/snapshot-controller-56fcc65765" duration="55.9µs"
	I0916 10:37:40.457925       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="58.632µs"
	I0916 10:37:42.479801       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.509000       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.572015       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.515757       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.524004       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.530235       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:45.150271       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:52.558234       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="22.364743ms"
	I0916 10:37:52.559132       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="31.351µs"
	I0916 10:37:54.619407       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:54.659736       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="16.445353ms"
	I0916 10:37:54.660858       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="68.076µs"
	E0916 10:37:56.423390       1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: stale GroupVersion discovery: metrics.k8s.io/v1beta1" logger="UnhandledError"
	I0916 10:37:56.899634       1 garbagecollector.go:826] "failed to discover some groups" logger="garbage-collector-controller" groups="<internal error: json: unsupported type: map[schema.GroupVersion]error>"
	I0916 10:37:58.024462       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:37:58.060468       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:38:14.019749       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:14.050322       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:16.992451       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="19.424064ms"
	I0916 10:38:16.993500       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="50.764µs"
	I0916 10:38:25.225399       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:39:05.149618       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.223µs"
	
	
	==> kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] <==
	I0916 10:36:01.688812       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:36:02.265241       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:36:02.271591       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:36:02.423456       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:36:02.423579       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:36:02.431736       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:36:02.432160       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:36:02.432351       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:36:02.433544       1 config.go:199] "Starting service config controller"
	I0916 10:36:02.433620       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:36:02.433682       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:36:02.433713       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:36:02.434194       1 config.go:328] "Starting node config controller"
	I0916 10:36:02.434243       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:36:02.545223       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:36:02.585616       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:36:02.585634       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] <==
	W0916 10:35:50.291598       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:35:50.291652       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291738       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291810       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291911       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291966       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292090       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:35:50.292141       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292276       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292635       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292342       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:35:50.292669       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292396       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:35:50.292714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292436       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:35:50.292743       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292494       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:35:50.292771       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292533       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:35:50.292790       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292814       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292916       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:35:50.292984       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	I0916 10:35:51.479680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:38:45 addons-936355 kubelet[1507]: E0916 10:38:45.951975    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:38:51 addons-936355 kubelet[1507]: I0916 10:38:51.964062    1507 scope.go:117] "RemoveContainer" containerID="16f3ba74fd477e6809254f0696429acd69c59d300feb20a69365dff521532653"
	Sep 16 10:38:51 addons-936355 kubelet[1507]: I0916 10:38:51.988957    1507 scope.go:117] "RemoveContainer" containerID="f63cb6636d57daf2da092dde08c60351559707b647e16732f387642a34fb2976"
	Sep 16 10:38:52 addons-936355 kubelet[1507]: E0916 10:38:52.114006    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483132113749857,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:38:52 addons-936355 kubelet[1507]: E0916 10:38:52.114042    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483132113749857,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:38:56 addons-936355 kubelet[1507]: I0916 10:38:56.951462    1507 scope.go:117] "RemoveContainer" containerID="b3fda3bbc6527bab3713f7ee920de180eb00d2705006c750054ca0da818f0982"
	Sep 16 10:38:56 addons-936355 kubelet[1507]: E0916 10:38:56.951634    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:39:02 addons-936355 kubelet[1507]: E0916 10:39:02.116890    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483142116637322,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:39:02 addons-936355 kubelet[1507]: E0916 10:39:02.116932    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483142116637322,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.488286    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdtjv\" (UniqueName: \"kubernetes.io/projected/6f439a0d-4e84-4ea2-97ef-2666b73327b7-kube-api-access-jdtjv\") pod \"6f439a0d-4e84-4ea2-97ef-2666b73327b7\" (UID: \"6f439a0d-4e84-4ea2-97ef-2666b73327b7\") "
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.488343    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsfwx\" (UniqueName: \"kubernetes.io/projected/f3007abe-d474-44b8-91de-56f1d2dc83a9-kube-api-access-hsfwx\") pod \"f3007abe-d474-44b8-91de-56f1d2dc83a9\" (UID: \"f3007abe-d474-44b8-91de-56f1d2dc83a9\") "
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.491695    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f439a0d-4e84-4ea2-97ef-2666b73327b7-kube-api-access-jdtjv" (OuterVolumeSpecName: "kube-api-access-jdtjv") pod "6f439a0d-4e84-4ea2-97ef-2666b73327b7" (UID: "6f439a0d-4e84-4ea2-97ef-2666b73327b7"). InnerVolumeSpecName "kube-api-access-jdtjv". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.492844    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3007abe-d474-44b8-91de-56f1d2dc83a9-kube-api-access-hsfwx" (OuterVolumeSpecName: "kube-api-access-hsfwx") pod "f3007abe-d474-44b8-91de-56f1d2dc83a9" (UID: "f3007abe-d474-44b8-91de-56f1d2dc83a9"). InnerVolumeSpecName "kube-api-access-hsfwx". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.589696    1507 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-jdtjv\" (UniqueName: \"kubernetes.io/projected/6f439a0d-4e84-4ea2-97ef-2666b73327b7-kube-api-access-jdtjv\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.589735    1507 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-hsfwx\" (UniqueName: \"kubernetes.io/projected/f3007abe-d474-44b8-91de-56f1d2dc83a9-kube-api-access-hsfwx\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.734104    1507 scope.go:117] "RemoveContainer" containerID="98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.767839    1507 scope.go:117] "RemoveContainer" containerID="98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: E0916 10:39:05.768425    1507 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3\": container with ID starting with 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3 not found: ID does not exist" containerID="98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.768458    1507 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3"} err="failed to get container status \"98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3\": rpc error: code = NotFound desc = could not find container \"98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3\": container with ID starting with 98b21953b25791555c220446acb65672ddacb442b186aa5654acc8d51867b8a3 not found: ID does not exist"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.768519    1507 scope.go:117] "RemoveContainer" containerID="87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.816338    1507 scope.go:117] "RemoveContainer" containerID="87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: E0916 10:39:05.817087    1507 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07\": container with ID starting with 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07 not found: ID does not exist" containerID="87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.817121    1507 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07"} err="failed to get container status \"87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07\": rpc error: code = NotFound desc = could not find container \"87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07\": container with ID starting with 87f384d0874aec7b02ee16f7af41c18367ef47b3aa01cc01ed3c3ae635da2b07 not found: ID does not exist"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.952201    1507 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f439a0d-4e84-4ea2-97ef-2666b73327b7" path="/var/lib/kubelet/pods/6f439a0d-4e84-4ea2-97ef-2666b73327b7/volumes"
	Sep 16 10:39:05 addons-936355 kubelet[1507]: I0916 10:39:05.952595    1507 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3007abe-d474-44b8-91de-56f1d2dc83a9" path="/var/lib/kubelet/pods/f3007abe-d474-44b8-91de-56f1d2dc83a9/volumes"
	
	
	==> storage-provisioner [2a862ef326432a5d0293f9317e2a22cc3bbc0e787dab4595749d403d11fd2627] <==
	I0916 10:36:43.471506       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:36:43.494873       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:36:43.495065       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:36:43.512818       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:36:43.513129       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	I0916 10:36:43.520230       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"89d79315-71d3-40c0-aeb5-687aa54390d8", APIVersion:"v1", ResourceVersion:"938", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8 became leader
	I0916 10:36:43.613923       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	

-- /stdout --
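The kubelet "ContainerStatus from runtime service failed ... NotFound" errors near the end of the log above (container IDs 98b21953... and 87f384d0...) read as a cleanup race rather than a separate failure: kubelet asks cri-o for the status of a container the runtime has already removed during pod teardown. If in doubt, the runtime can be queried directly on the node; the command below is an illustrative check, not part of the harness (crictl ships in the minikube node image), and simply confirms the container is gone:

	out/minikube-linux-arm64 -p addons-936355 ssh -- sudo crictl ps -a | grep 98b21953 || echo "container already removed"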
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-936355 -n addons-936355
helpers_test.go:261: (dbg) Run:  kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (654.062µs)
helpers_test.go:263: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/Registry (14.54s)
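Every kubectl call in this test dies the same way: "fork/exec /usr/local/bin/kubectl: exec format error". That error (ENOEXEC) means the kernel refused to execute the binary at all, which on an arm64 runner almost always means an amd64 (or otherwise foreign-architecture) kubectl is installed at /usr/local/bin/kubectl. A minimal check and fix on the affected machine might look like the following; these commands are illustrative and not part of the harness, and the download URL follows the standard dl.k8s.io layout for the v1.31.1/linux/arm64 build matching the cluster version in this run:

	uname -m                        # expect aarch64 on this runner
	file /usr/local/bin/kubectl     # prints the architecture the binary was built for
	curl -LO https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl

The same architecture mismatch accounts for the other kubectl-based failures in this report.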

TestAddons/parallel/Ingress (2.54s)

=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

=== CONT  TestAddons/parallel/Ingress
addons_test.go:209: (dbg) Run:  kubectl --context addons-936355 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:209: (dbg) Non-zero exit: kubectl --context addons-936355 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s: fork/exec /usr/local/bin/kubectl: exec format error (3.701098ms)
addons_test.go:210: failed waiting for ingress-nginx-controller : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Ingress]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-936355
helpers_test.go:235: (dbg) docker inspect addons-936355:

-- stdout --
	[
	    {
	        "Id": "990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22",
	        "Created": "2024-09-16T10:35:26.829229764Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1385081,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:35:26.979651686Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hostname",
	        "HostsPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hosts",
	        "LogPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22-json.log",
	        "Name": "/addons-936355",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-936355:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-936355",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/merged",
	                "UpperDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/diff",
	                "WorkDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-936355",
	                "Source": "/var/lib/docker/volumes/addons-936355/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-936355",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-936355",
	                "name.minikube.sigs.k8s.io": "addons-936355",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "c25ad70fe630d4f698b2829da4e56bff2645b3ff549ca5302800a382e6bdd028",
	            "SandboxKey": "/var/run/docker/netns/c25ad70fe630",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34603"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34604"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34607"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34605"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34606"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-936355": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "5d73edaa3366fd0ba0b4bacad454985b0bd272fda9938fc527483e0046d7c748",
	                    "EndpointID": "cf4cd538acb5e979612a79c60d294fba1f05c9fef1a1bec978977fcb945819c4",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-936355",
	                        "990f1d352091"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
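The NetworkSettings.Ports block in the inspect output above is how the harness reaches the node: each container port (22 for SSH, 8443 for the Kubernetes API server, and so on) is published on an ephemeral localhost port, 34603 through 34607 in this run. To recover a mapping outside the harness, either of the following works against this profile; the inspect template is the same one minikube itself runs later in this log:

	docker port addons-936355 22/tcp
	docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' addons-936355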
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-936355 -n addons-936355
helpers_test.go:244: <<< TestAddons/parallel/Ingress FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Ingress]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 logs -n 25: (1.606197383s)
helpers_test.go:252: TestAddons/parallel/Ingress logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-084128              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| start   | -o=json --download-only              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-605096              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | download-docker-880503               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p download-docker-880503            | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | binary-mirror-652159                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:40363               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-652159              | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| addons  | enable dashboard -p                  | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| start   | -p addons-936355 --wait=true         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:38 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| ip      | addons-936355 ip                     | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	| addons  | addons-936355 addons disable         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | addons-936355 addons                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:44 UTC | 16 Sep 24 10:44 UTC |
	|         | disable metrics-server               |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p          | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:35:01
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:35:01.861741 1384589 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:35:01.861923 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.861959 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:35:01.861972 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.862230 1384589 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:35:01.862730 1384589 out.go:352] Setting JSON to false
	I0916 10:35:01.863665 1384589 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37047,"bootTime":1726445855,"procs":155,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:35:01.863739 1384589 start.go:139] virtualization:  
	I0916 10:35:01.866923 1384589 out.go:177] * [addons-936355] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:35:01.870432 1384589 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:35:01.870537 1384589 notify.go:220] Checking for updates...
	I0916 10:35:01.875880 1384589 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:35:01.878650 1384589 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:01.881242 1384589 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:35:01.883862 1384589 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:35:01.886520 1384589 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:35:01.889353 1384589 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:35:01.930300 1384589 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:35:01.930438 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:01.986400 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:01.976217774 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:01.986524 1384589 docker.go:318] overlay module found
	I0916 10:35:01.989262 1384589 out.go:177] * Using the docker driver based on user configuration
	I0916 10:35:01.991996 1384589 start.go:297] selected driver: docker
	I0916 10:35:01.992025 1384589 start.go:901] validating driver "docker" against <nil>
	I0916 10:35:01.992040 1384589 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:35:01.992727 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:02.058953 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:02.049617339 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:02.059182 1384589 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:35:02.059420 1384589 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:35:02.062017 1384589 out.go:177] * Using Docker driver with root privileges
	I0916 10:35:02.064628 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:02.064789 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:02.064804 1384589 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:35:02.064885 1384589 start.go:340] cluster config:
	{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:02.069567 1384589 out.go:177] * Starting "addons-936355" primary control-plane node in "addons-936355" cluster
	I0916 10:35:02.072130 1384589 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:35:02.074827 1384589 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:35:02.077314 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:02.077371 1384589 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:35:02.077383 1384589 cache.go:56] Caching tarball of preloaded images
	I0916 10:35:02.077398 1384589 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:35:02.077476 1384589 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:35:02.077486 1384589 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:35:02.077848 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:02.077880 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json: {Name:mkd05c2b0dbaa1cc700db22c74ae8fbcc0c53329 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:02.092106 1384589 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:35:02.092232 1384589 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:35:02.092252 1384589 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:35:02.092257 1384589 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:35:02.092264 1384589 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:35:02.092269 1384589 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:35:19.265886 1384589 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:35:19.265926 1384589 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:35:19.265955 1384589 start.go:360] acquireMachinesLock for addons-936355: {Name:mk780e867f4084d469fbad7a4968b7ad3d556c69 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:35:19.266489 1384589 start.go:364] duration metric: took 511.962µs to acquireMachinesLock for "addons-936355"
	I0916 10:35:19.266531 1384589 start.go:93] Provisioning new machine with config: &{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:19.266610 1384589 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:35:19.269716 1384589 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:35:19.269968 1384589 start.go:159] libmachine.API.Create for "addons-936355" (driver="docker")
	I0916 10:35:19.270003 1384589 client.go:168] LocalClient.Create starting
	I0916 10:35:19.270125 1384589 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:35:20.065665 1384589 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:35:20.505791 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:35:20.520423 1384589 cli_runner.go:211] docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:35:20.520525 1384589 network_create.go:284] running [docker network inspect addons-936355] to gather additional debugging logs...
	I0916 10:35:20.520546 1384589 cli_runner.go:164] Run: docker network inspect addons-936355
	W0916 10:35:20.534395 1384589 cli_runner.go:211] docker network inspect addons-936355 returned with exit code 1
	I0916 10:35:20.534432 1384589 network_create.go:287] error running [docker network inspect addons-936355]: docker network inspect addons-936355: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-936355 not found
	I0916 10:35:20.534447 1384589 network_create.go:289] output of [docker network inspect addons-936355]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-936355 not found
	
	** /stderr **
	I0916 10:35:20.534555 1384589 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:20.550802 1384589 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001826a70}
	I0916 10:35:20.550849 1384589 network_create.go:124] attempt to create docker network addons-936355 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:35:20.550909 1384589 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-936355 addons-936355
	I0916 10:35:20.622324 1384589 network_create.go:108] docker network addons-936355 192.168.49.0/24 created
	I0916 10:35:20.622359 1384589 kic.go:121] calculated static IP "192.168.49.2" for the "addons-936355" container
	I0916 10:35:20.622443 1384589 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:35:20.636891 1384589 cli_runner.go:164] Run: docker volume create addons-936355 --label name.minikube.sigs.k8s.io=addons-936355 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:35:20.653249 1384589 oci.go:103] Successfully created a docker volume addons-936355
	I0916 10:35:20.653357 1384589 cli_runner.go:164] Run: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:35:22.737442 1384589 cli_runner.go:217] Completed: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (2.08404207s)
	I0916 10:35:22.737471 1384589 oci.go:107] Successfully prepared a docker volume addons-936355
	I0916 10:35:22.737499 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:22.737519 1384589 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:35:22.737588 1384589 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:35:26.763089 1384589 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.025452617s)
	I0916 10:35:26.763126 1384589 kic.go:203] duration metric: took 4.025604753s to extract preloaded images to volume ...
	W0916 10:35:26.763258 1384589 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:35:26.763378 1384589 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:35:26.814712 1384589 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-936355 --name addons-936355 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-936355 --network addons-936355 --ip 192.168.49.2 --volume addons-936355:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:35:27.165000 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Running}}
	I0916 10:35:27.189076 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:27.216370 1384589 cli_runner.go:164] Run: docker exec addons-936355 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:35:27.281467 1384589 oci.go:144] the created container "addons-936355" has a running status.
	I0916 10:35:27.281502 1384589 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa...
	I0916 10:35:28.804386 1384589 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:35:28.826599 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.843564 1384589 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:35:28.843591 1384589 kic_runner.go:114] Args: [docker exec --privileged addons-936355 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:35:28.892577 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.913158 1384589 machine.go:93] provisionDockerMachine start ...
	I0916 10:35:28.913258 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:28.931596 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:28.931893 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:28.931910 1384589 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:35:29.068030 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.068064 1384589 ubuntu.go:169] provisioning hostname "addons-936355"
	I0916 10:35:29.068142 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.085139 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.085383 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.085399 1384589 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-936355 && echo "addons-936355" | sudo tee /etc/hostname
	I0916 10:35:29.232508 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.232589 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.248944 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.249190 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.249214 1384589 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-936355' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-936355/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-936355' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:35:29.385206 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:35:29.385233 1384589 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:35:29.385263 1384589 ubuntu.go:177] setting up certificates
	I0916 10:35:29.385275 1384589 provision.go:84] configureAuth start
	I0916 10:35:29.385357 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:29.401844 1384589 provision.go:143] copyHostCerts
	I0916 10:35:29.401930 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:35:29.402060 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:35:29.402129 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:35:29.402184 1384589 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.addons-936355 san=[127.0.0.1 192.168.49.2 addons-936355 localhost minikube]
	I0916 10:35:29.844064 1384589 provision.go:177] copyRemoteCerts
	I0916 10:35:29.844139 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:35:29.844181 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.860341 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:29.957424 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:35:29.982494 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:35:30.020527 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:35:30.083993 1384589 provision.go:87] duration metric: took 698.682489ms to configureAuth
	I0916 10:35:30.084118 1384589 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:35:30.084480 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:30.084746 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.108015 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:30.108273 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:30.108291 1384589 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:35:30.350713 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:35:30.350736 1384589 machine.go:96] duration metric: took 1.437556677s to provisionDockerMachine
	I0916 10:35:30.350754 1384589 client.go:171] duration metric: took 11.080732872s to LocalClient.Create
	I0916 10:35:30.350775 1384589 start.go:167] duration metric: took 11.080807939s to libmachine.API.Create "addons-936355"
	I0916 10:35:30.350784 1384589 start.go:293] postStartSetup for "addons-936355" (driver="docker")
	I0916 10:35:30.350795 1384589 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:35:30.350871 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:35:30.350928 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.367694 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.471627 1384589 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:35:30.475048 1384589 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:35:30.475083 1384589 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:35:30.475094 1384589 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:35:30.475101 1384589 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:35:30.475111 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:35:30.475191 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:35:30.475215 1384589 start.go:296] duration metric: took 124.425275ms for postStartSetup
	I0916 10:35:30.475537 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.492884 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:30.493230 1384589 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:35:30.493280 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.510291 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.601939 1384589 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:35:30.606785 1384589 start.go:128] duration metric: took 11.340152497s to createHost
	I0916 10:35:30.606809 1384589 start.go:83] releasing machines lock for "addons-936355", held for 11.340303023s
	I0916 10:35:30.606879 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.623200 1384589 ssh_runner.go:195] Run: cat /version.json
	I0916 10:35:30.623223 1384589 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:35:30.623263 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.623284 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.644076 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.644213 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.736416 1384589 ssh_runner.go:195] Run: systemctl --version
	I0916 10:35:30.866086 1384589 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:35:31.012168 1384589 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:35:31.016985 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.040299 1384589 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:35:31.040383 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.079331 1384589 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
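
Note: the two find/mv passes above neuter any preinstalled loopback and bridge/podman CNI configs by renaming them with a .mk_disabled suffix, so only the CNI minikube installs later is active. A quick hedged check, assuming the same container name:

	# list what was renamed; the log line above names 87-podman-bridge.conflist and
	# 100-crio-bridge.conf, which should now carry a .mk_disabled suffix
	docker exec addons-936355 ls -la /etc/cni/net.d/
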
	I0916 10:35:31.079357 1384589 start.go:495] detecting cgroup driver to use...
	I0916 10:35:31.079391 1384589 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:35:31.079448 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:35:31.097860 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:35:31.111311 1384589 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:35:31.111396 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:35:31.126864 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:35:31.142983 1384589 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:35:31.237602 1384589 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:35:31.329055 1384589 docker.go:233] disabling docker service ...
	I0916 10:35:31.329150 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:35:31.350134 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:35:31.362931 1384589 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:35:31.458212 1384589 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:35:31.563725 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
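
Note: because the runtime under test is CRI-O, cri-docker and docker are stopped, disabled and masked before CRI-O is configured. The sequence above generalizes to a small loop; this is only a sketch of the pattern, not minikube's actual code (the log disables the sockets and masks the services):

	for unit in cri-docker.socket cri-docker.service docker.socket docker.service; do
	  sudo systemctl stop -f "$unit" || true      # stop now
	  sudo systemctl disable "$unit" || true      # don't start on boot
	  sudo systemctl mask "$unit" || true         # block manual/dependency starts
	done
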
	I0916 10:35:31.575461 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:35:31.592172 1384589 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:35:31.592265 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.602336 1384589 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:35:31.602418 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.612396 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.622391 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.632203 1384589 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:35:31.642063 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.651889 1384589 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.669408 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
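
Note: the sed runs above leave /etc/crio/crio.conf.d/02-crio.conf with the 3.10 pause image, the cgroupfs cgroup manager, conmon in the pod cgroup, and unprivileged low ports enabled. Reconstructed from the commands themselves (not captured output), the drop-in can be checked like so:

	docker exec addons-936355 grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' /etc/crio/crio.conf.d/02-crio.conf
	# expected lines, per the sed edits above:
	#   pause_image = "registry.k8s.io/pause:3.10"
	#   cgroup_manager = "cgroupfs"
	#   conmon_cgroup = "pod"
	#     "net.ipv4.ip_unprivileged_port_start=0",
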
	I0916 10:35:31.683307 1384589 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:35:31.692220 1384589 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:35:31.702005 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:31.781982 1384589 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:35:31.897438 1384589 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:35:31.897567 1384589 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:35:31.901379 1384589 start.go:563] Will wait 60s for crictl version
	I0916 10:35:31.901491 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:35:31.904735 1384589 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:35:31.941675 1384589 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:35:31.941854 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:31.981298 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:32.027709 1384589 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:35:32.030371 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:32.045684 1384589 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:35:32.049353 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.060434 1384589 kubeadm.go:883] updating cluster {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:35:32.060562 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:32.060622 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.132274 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.132300 1384589 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:35:32.132361 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.168136 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.168159 1384589 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:35:32.168167 1384589 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:35:32.168274 1384589 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=addons-936355 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
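
Note: the empty ExecStart= line in the rendered kubelet unit above is the standard systemd drop-in idiom: clearing ExecStart first replaces, rather than appends to, the base kubelet.service command line. One way to confirm the merged unit, assuming the node container name from this log:

	docker exec addons-936355 systemctl cat kubelet   # shows the base unit plus the 10-kubeadm.conf drop-in
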
	I0916 10:35:32.168366 1384589 ssh_runner.go:195] Run: crio config
	I0916 10:35:32.227191 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:32.227213 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:32.227223 1384589 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:35:32.227267 1384589 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-936355 NodeName:addons-936355 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:35:32.227445 1384589 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "addons-936355"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
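
Note: this generated config still uses the deprecated kubeadm.k8s.io/v1beta3 API; kubeadm warns about exactly this later in the log (around 10:35:52) and suggests its migrator. A hedged sketch of that migration, run inside the node with the paths used above (the destination path is illustrative only):

	sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config migrate \
	  --old-config /var/tmp/minikube/kubeadm.yaml \
	  --new-config /tmp/kubeadm-v1beta4.yaml   # hypothetical output path
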
	
	I0916 10:35:32.227523 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:35:32.236628 1384589 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:35:32.236739 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:35:32.245582 1384589 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:35:32.264058 1384589 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:35:32.283541 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2151 bytes)
	I0916 10:35:32.302607 1384589 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:35:32.306351 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.317408 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:32.409376 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:32.423337 1384589 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355 for IP: 192.168.49.2
	I0916 10:35:32.423401 1384589 certs.go:194] generating shared ca certs ...
	I0916 10:35:32.423434 1384589 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:32.423586 1384589 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:35:34.185450 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt ...
	I0916 10:35:34.185484 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt: {Name:mk7933e16cdd72038659b0287d05eb0c475b810e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.185680 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key ...
	I0916 10:35:34.185693 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key: {Name:mkb7482a30b71122d1b4fb2bf43b1e757c702edc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.186220 1384589 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:35:34.459909 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt ...
	I0916 10:35:34.459947 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt: {Name:mke012c32e9f14a06899ff2aaaf49a35a27f11b6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460629 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key ...
	I0916 10:35:34.460645 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key: {Name:mk5d1994088ad6012c806fe8f78deff99aef1b4a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460749 1384589 certs.go:256] generating profile certs ...
	I0916 10:35:34.460814 1384589 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key
	I0916 10:35:34.460832 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt with IP's: []
	I0916 10:35:34.818752 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt ...
	I0916 10:35:34.818789 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: {Name:mk0c01900c6bb90e11943bb255479c9c46b42cdc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.819458 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key ...
	I0916 10:35:34.819477 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key: {Name:mk6a80bf44231e37c26b15b78c1573c745bc94c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.820007 1384589 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8
	I0916 10:35:34.820055 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:35:35.136595 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 ...
	I0916 10:35:35.136634 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8: {Name:mkefb9e5abb2f41ae336f1dfb5f1a2e66afaeb9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.136842 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 ...
	I0916 10:35:35.136857 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8: {Name:mkea4ee147dec7cfd16ab920313dbb27db2e74f5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.137417 1384589 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt
	I0916 10:35:35.137519 1384589 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key
	I0916 10:35:35.137576 1384589 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key
	I0916 10:35:35.137599 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt with IP's: []
	I0916 10:35:35.880558 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt ...
	I0916 10:35:35.880594 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt: {Name:mke368773a6b2b93aed6ad850fe8fd0d4a737afa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881334 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key ...
	I0916 10:35:35.881354 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key: {Name:mk0b7d6a78a045adf50310a69acebceca87fff88 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
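
Note: at this point minikube holds a shared CA, a proxy-client CA, and per-profile client, apiserver, and aggregator certs. The apiserver cert was requested with IP SANs [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2] (see the 10:35:34.820055 line above); a quick confirmation, assuming openssl is available on the build host:

	openssl x509 -noout -text \
	  -in /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt \
	  | grep -A1 'Subject Alternative Name'
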
	I0916 10:35:35.881575 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:35:35.881620 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:35:35.881652 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:35:35.881681 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:35:35.882348 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:35:35.913124 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:35:35.940837 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:35:35.966731 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:35:35.992292 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:35:36.018704 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:35:36.045022 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:35:36.070444 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:35:36.097278 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:35:36.122467 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:35:36.141948 1384589 ssh_runner.go:195] Run: openssl version
	I0916 10:35:36.147681 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:35:36.157655 1384589 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161783 1384589 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161849 1384589 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.169303 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
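
Note: the b5213941.0 symlink created above follows OpenSSL's hashed-directory convention: the file name is the subject hash of the CA cert (computed by the openssl x509 -hash call two lines earlier) plus a .0 suffix, which is how TLS clients on the node locate the minikube CA:

	openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem   # prints b5213941
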
	I0916 10:35:36.183583 1384589 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:35:36.188459 1384589 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:35:36.188535 1384589 kubeadm.go:392] StartCluster: {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:36.188663 1384589 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:35:36.188762 1384589 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:35:36.238853 1384589 cri.go:89] found id: ""
	I0916 10:35:36.238944 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:35:36.247955 1384589 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:35:36.256986 1384589 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:35:36.257089 1384589 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:35:36.266246 1384589 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:35:36.266266 1384589 kubeadm.go:157] found existing configuration files:
	
	I0916 10:35:36.266339 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:35:36.274963 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:35:36.275044 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:35:36.283444 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:35:36.292355 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:35:36.292450 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:35:36.300873 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.309855 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:35:36.309929 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.318718 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:35:36.328008 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:35:36.328097 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:35:36.336437 1384589 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:35:36.378930 1384589 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:35:36.379124 1384589 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:35:36.400406 1384589 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:35:36.400480 1384589 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:35:36.400522 1384589 kubeadm.go:310] OS: Linux
	I0916 10:35:36.400571 1384589 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:35:36.400622 1384589 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:35:36.400687 1384589 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:35:36.400738 1384589 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:35:36.400790 1384589 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:35:36.400843 1384589 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:35:36.400891 1384589 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:35:36.400941 1384589 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:35:36.400990 1384589 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:35:36.460868 1384589 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:35:36.460983 1384589 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:35:36.461077 1384589 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:35:36.469524 1384589 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:35:36.478231 1384589 out.go:235]   - Generating certificates and keys ...
	I0916 10:35:36.478421 1384589 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:35:36.478536 1384589 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:35:37.031514 1384589 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:35:37.927948 1384589 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:35:38.481156 1384589 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:35:38.950500 1384589 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:35:40.037164 1384589 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:35:40.037694 1384589 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.393078 1384589 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:35:40.393223 1384589 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.639316 1384589 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:35:41.086019 1384589 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:35:41.417060 1384589 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:35:41.417146 1384589 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:35:41.829000 1384589 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:35:42.186509 1384589 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:35:43.056769 1384589 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:35:43.944133 1384589 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:35:44.069436 1384589 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:35:44.070260 1384589 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:35:44.073516 1384589 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:35:44.076353 1384589 out.go:235]   - Booting up control plane ...
	I0916 10:35:44.076466 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:35:44.076546 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:35:44.077309 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:35:44.088522 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:35:44.095329 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:35:44.095390 1384589 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:35:44.198308 1384589 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:35:44.198428 1384589 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:35:45.200220 1384589 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00196488s
	I0916 10:35:45.200324 1384589 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:35:51.202352 1384589 kubeadm.go:310] [api-check] The API server is healthy after 6.002166951s
	I0916 10:35:51.223941 1384589 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:35:51.239556 1384589 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:35:51.267029 1384589 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:35:51.267231 1384589 kubeadm.go:310] [mark-control-plane] Marking the node addons-936355 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:35:51.278589 1384589 kubeadm.go:310] [bootstrap-token] Using token: 08qv26.fux33djnogp684b3
	I0916 10:35:51.281486 1384589 out.go:235]   - Configuring RBAC rules ...
	I0916 10:35:51.281633 1384589 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:35:51.288736 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:35:51.298974 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:35:51.303116 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:35:51.306944 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:35:51.312530 1384589 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:35:51.609739 1384589 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:35:52.042589 1384589 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:35:52.609454 1384589 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:35:52.610559 1384589 kubeadm.go:310] 
	I0916 10:35:52.610639 1384589 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:35:52.610651 1384589 kubeadm.go:310] 
	I0916 10:35:52.610728 1384589 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:35:52.610737 1384589 kubeadm.go:310] 
	I0916 10:35:52.610762 1384589 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:35:52.610825 1384589 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:35:52.610877 1384589 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:35:52.610886 1384589 kubeadm.go:310] 
	I0916 10:35:52.610939 1384589 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:35:52.610947 1384589 kubeadm.go:310] 
	I0916 10:35:52.610994 1384589 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:35:52.611003 1384589 kubeadm.go:310] 
	I0916 10:35:52.611054 1384589 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:35:52.611131 1384589 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:35:52.611205 1384589 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:35:52.611213 1384589 kubeadm.go:310] 
	I0916 10:35:52.611296 1384589 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:35:52.611376 1384589 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:35:52.611384 1384589 kubeadm.go:310] 
	I0916 10:35:52.611467 1384589 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.611571 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:35:52.611602 1384589 kubeadm.go:310] 	--control-plane 
	I0916 10:35:52.611610 1384589 kubeadm.go:310] 
	I0916 10:35:52.611694 1384589 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:35:52.611701 1384589 kubeadm.go:310] 
	I0916 10:35:52.611782 1384589 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.612037 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
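
Note: the --discovery-token-ca-cert-hash printed above pins the cluster CA for joining nodes; it is the SHA-256 of the CA public key in DER form. The standard recipe documented for kubeadm recomputes it as follows (the cert path below is minikube's, not kubeadm's default /etc/kubernetes/pki/ca.crt):

	openssl x509 -pubkey -in /var/lib/minikube/certs/ca.crt \
	  | openssl rsa -pubin -outform der 2>/dev/null \
	  | openssl dgst -sha256 -hex | sed 's/^.* //'
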
	I0916 10:35:52.615159 1384589 kubeadm.go:310] W0916 10:35:36.375856    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615456 1384589 kubeadm.go:310] W0916 10:35:36.376640    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615672 1384589 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:35:52.615783 1384589 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:35:52.615802 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:52.615810 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:52.618721 1384589 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:35:52.621394 1384589 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:35:52.625462 1384589 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:35:52.625484 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:35:52.644461 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
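
Note: with the docker driver and crio runtime, minikube selects the kindnet CNI (see the "recommending kindnet" lines in this log) and applies its manifest with the cluster's own kubectl, as above. A hedged follow-up check, with the pod label assumed from kindnet's upstream DaemonSet rather than taken from this log:

	sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
	  get pods -n kube-system -l app=kindnet
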
	I0916 10:35:52.919005 1384589 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:35:52.919065 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:52.919130 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-936355 minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-936355 minikube.k8s.io/primary=true
	I0916 10:35:52.934021 1384589 ops.go:34] apiserver oom_adj: -16
	I0916 10:35:53.058693 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:53.559565 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.058855 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.558709 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.059014 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.559273 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.058909 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.559492 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
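
Note: the repeated "kubectl get sa default" runs above are a readiness poll: the default ServiceAccount only appears once the controller-manager is up, and minikube waits for it before granting kube-system privileges (the elevateKubeSystemPrivileges metric on the next line). The loop amounts to something like this sketch:

	until sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
	      get sa default >/dev/null 2>&1; do
	  sleep 0.5   # interval assumed; the timestamps above show ~500ms spacing
	done
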
	I0916 10:35:56.645989 1384589 kubeadm.go:1113] duration metric: took 3.7269816s to wait for elevateKubeSystemPrivileges
	I0916 10:35:56.646081 1384589 kubeadm.go:394] duration metric: took 20.457571781s to StartCluster
	I0916 10:35:56.646115 1384589 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.646272 1384589 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:56.646729 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.647006 1384589 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:56.647218 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.647256 1384589 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:35:56.647344 1384589 addons.go:69] Setting yakd=true in profile "addons-936355"
	I0916 10:35:56.647362 1384589 addons.go:234] Setting addon yakd=true in "addons-936355"
	I0916 10:35:56.647386 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.647853 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.647019 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:35:56.648343 1384589 addons.go:69] Setting inspektor-gadget=true in profile "addons-936355"
	I0916 10:35:56.648358 1384589 addons.go:69] Setting metrics-server=true in profile "addons-936355"
	I0916 10:35:56.648364 1384589 addons.go:69] Setting cloud-spanner=true in profile "addons-936355"
	I0916 10:35:56.648372 1384589 addons.go:234] Setting addon cloud-spanner=true in "addons-936355"
	I0916 10:35:56.648375 1384589 addons.go:234] Setting addon metrics-server=true in "addons-936355"
	I0916 10:35:56.648397 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648398 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648856 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648883 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.651521 1384589 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-936355"
	I0916 10:35:56.651556 1384589 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-936355"
	I0916 10:35:56.651597 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.652064 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.654169 1384589 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-936355"
	I0916 10:35:56.654360 1384589 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:35:56.654505 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.656244 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.657047 1384589 addons.go:69] Setting registry=true in profile "addons-936355"
	I0916 10:35:56.657068 1384589 addons.go:234] Setting addon registry=true in "addons-936355"
	I0916 10:35:56.657100 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.657530 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.665356 1384589 addons.go:69] Setting storage-provisioner=true in profile "addons-936355"
	I0916 10:35:56.665392 1384589 addons.go:234] Setting addon storage-provisioner=true in "addons-936355"
	I0916 10:35:56.665428 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.665900 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656864 1384589 addons.go:69] Setting default-storageclass=true in profile "addons-936355"
	I0916 10:35:56.672310 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-936355"
	I0916 10:35:56.672744 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656877 1384589 addons.go:69] Setting gcp-auth=true in profile "addons-936355"
	I0916 10:35:56.677792 1384589 mustload.go:65] Loading cluster: addons-936355
	I0916 10:35:56.678032 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.678386 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.685741 1384589 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-936355"
	I0916 10:35:56.685780 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-936355"
	I0916 10:35:56.686170 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656881 1384589 addons.go:69] Setting ingress=true in profile "addons-936355"
	I0916 10:35:56.697863 1384589 addons.go:234] Setting addon ingress=true in "addons-936355"
	I0916 10:35:56.697916 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.698402 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656886 1384589 addons.go:69] Setting ingress-dns=true in profile "addons-936355"
	I0916 10:35:56.714403 1384589 addons.go:234] Setting addon ingress-dns=true in "addons-936355"
	I0916 10:35:56.714458 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.715038 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.718711 1384589 out.go:177] * Verifying Kubernetes components...
	I0916 10:35:56.721654 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:56.725191 1384589 addons.go:69] Setting volcano=true in profile "addons-936355"
	I0916 10:35:56.725221 1384589 addons.go:234] Setting addon volcano=true in "addons-936355"
	I0916 10:35:56.725264 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.725742 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.755780 1384589 addons.go:69] Setting volumesnapshots=true in profile "addons-936355"
	I0916 10:35:56.755830 1384589 addons.go:234] Setting addon volumesnapshots=true in "addons-936355"
	I0916 10:35:56.755891 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.756438 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648360 1384589 addons.go:234] Setting addon inspektor-gadget=true in "addons-936355"
	I0916 10:35:56.781338 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.781866 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
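
The repeated pattern above — "Setting addon X", "Checking if \"addons-936355\" exists", then a docker container inspect with a Go template — is how each addon goroutine confirms the profile container is up before doing any work. A minimal Go sketch of that status check, assuming only the docker CLI on PATH; this is illustrative, not minikube's actual cli_runner:

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    // containerStatus shells out to `docker container inspect` with a Go
    // template, mirroring the cli_runner lines in the log above.
    func containerStatus(name string) (string, error) {
        out, err := exec.Command("docker", "container", "inspect",
            name, "--format", "{{.State.Status}}").Output()
        if err != nil {
            return "", fmt.Errorf("inspect %s: %w", name, err)
        }
        return strings.TrimSpace(string(out)), nil
    }

    func main() {
        status, err := containerStatus("addons-936355")
        if err != nil {
            fmt.Println("error:", err)
            return
        }
        fmt.Println("container state:", status) // e.g. "running"
    }
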
	I0916 10:35:56.795868 1384589 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:35:56.806961 1384589 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:35:56.813860 1384589 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:56.813885 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:35:56.813953 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
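
The inspect template on the line above answers a different question: which host port Docker mapped to the container's 22/tcp, so the scp/ssh steps that follow know where to dial on 127.0.0.1. A sketch of the same lookup, again assuming the docker CLI and the addons-936355 container:

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    // sshHostPort resolves the host port mapped to the container's 22/tcp,
    // using the same Go template the cli_runner line above shows.
    func sshHostPort(container string) (string, error) {
        tmpl := `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
        out, err := exec.Command("docker", "container", "inspect",
            "-f", tmpl, container).Output()
        if err != nil {
            return "", err
        }
        return strings.TrimSpace(string(out)), nil
    }

    func main() {
        port, err := sshHostPort("addons-936355")
        if err != nil {
            fmt.Println("error:", err)
            return
        }
        fmt.Println("ssh port:", port) // this log shows 34603
    }
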
	I0916 10:35:56.825055 1384589 addons.go:234] Setting addon default-storageclass=true in "addons-936355"
	I0916 10:35:56.825094 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.825522 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.844917 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:35:56.847733 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:56.847756 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:35:56.847823 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.855550 1384589 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:56.855573 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:35:56.855637 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.868185 1384589 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:35:56.870805 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:35:56.870832 1384589 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:35:56.870903 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.880834 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.883983 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:35:56.888274 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.892893 1384589 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:35:56.893194 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:56.893206 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:35:56.893271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.895536 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:35:56.895559 1384589 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:35:56.895631 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.932992 1384589 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-936355"
	I0916 10:35:56.933037 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.933461 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.975517 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:35:56.981731 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:35:57.008862 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:35:57.011867 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.012071 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:35:57.012271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.012572 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:35:57.018623 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	W0916 10:35:57.018876 1384589 out.go:270] ! Enabling 'volcano' returned an error: running callbacks: [volcano addon does not support crio]
	I0916 10:35:57.019207 1384589 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:35:57.026491 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:35:57.031787 1384589 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.031824 1384589 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:35:57.031905 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.035870 1384589 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:35:57.037432 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:35:57.040920 1384589 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:35:57.041029 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.041817 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:57.047002 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:35:57.047021 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:35:57.047081 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.039199 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:35:57.067112 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:35:57.067136 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:35:57.067221 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.077336 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
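
Each "new ssh client" line above dials the forwarded port with key auth as user "docker". A minimal sketch of that dial using golang.org/x/crypto/ssh; the key path below is a placeholder standing in for the per-machine id_rsa shown in the log, and host-key checking is skipped purely for brevity:

    package main

    import (
        "fmt"
        "os"

        "golang.org/x/crypto/ssh"
    )

    // dial opens an SSH client the way the sshutil.go lines describe:
    // public-key auth as "docker" against the forwarded port on 127.0.0.1.
    func dial(port, keyPath string) (*ssh.Client, error) {
        pem, err := os.ReadFile(keyPath)
        if err != nil {
            return nil, err
        }
        signer, err := ssh.ParsePrivateKey(pem)
        if err != nil {
            return nil, err
        }
        cfg := &ssh.ClientConfig{
            User:            "docker",
            Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
            HostKeyCallback: ssh.InsecureIgnoreHostKey(), // demo only
        }
        return ssh.Dial("tcp", "127.0.0.1:"+port, cfg)
    }

    func main() {
        // Hypothetical key location; the log uses the Jenkins workspace path.
        client, err := dial("34603", os.Getenv("HOME")+"/.minikube/machines/addons-936355/id_rsa")
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer client.Close()
        fmt.Println("connected")
    }
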
	I0916 10:35:57.080728 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:35:57.083509 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:35:57.084922 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.092584 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:35:57.100918 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:35:57.102580 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.103637 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:35:57.103656 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:35:57.103715 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.120809 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.121658 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.165011 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.190914 1384589 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:35:57.195762 1384589 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:35:57.198447 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.198482 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:35:57.198559 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.237951 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.247913 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.261430 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.263688 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.268844 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.272259 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	W0916 10:35:57.289422 1384589 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:35:57.289665 1384589 retry.go:31] will retry after 343.76577ms: ssh: handshake failed: EOF
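
The handshake EOF above is transient (many clients are dialing sshd at once), so it is logged as a warning and retried after a randomized delay. A generic retry sketch with the same shape as the "will retry after 343.76577ms" line — illustrative only, not minikube's retry.go:

    package main

    import (
        "errors"
        "fmt"
        "math/rand"
        "time"
    )

    // retryAfter retries fn up to attempts times, sleeping a randomized
    // delay between tries, logging in the style seen in this log.
    func retryAfter(attempts int, base time.Duration, fn func() error) error {
        var err error
        for i := 0; i < attempts; i++ {
            if err = fn(); err == nil {
                return nil
            }
            d := base + time.Duration(rand.Int63n(int64(base)))
            fmt.Printf("will retry after %v: %v\n", d, err)
            time.Sleep(d)
        }
        return fmt.Errorf("after %d attempts: %w", attempts, err)
    }

    func main() {
        calls := 0
        err := retryAfter(3, 300*time.Millisecond, func() error {
            calls++
            if calls < 2 {
                return errors.New("ssh: handshake failed: EOF")
            }
            return nil
        })
        fmt.Println("result:", err)
    }
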
	I0916 10:35:57.317769 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.327435 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:57.327622 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
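
The one-liner above edits CoreDNS in place: it dumps the coredns ConfigMap as YAML, sed-inserts a hosts block ahead of the forward directive (and a log directive ahead of errors), and feeds the result back through kubectl replace. Assuming a stock Corefile, the patched fragment comes out roughly like this (other directives elided):

    .:53 {
        log
        errors
        ...
        hosts {
           192.168.49.1 host.minikube.internal
           fallthrough
        }
        forward . /etc/resolv.conf
        ...
    }

The hosts block resolves host.minikube.internal to the Docker network gateway and falls through to the remaining plugins for everything else, which is what the "host record injected into CoreDNS's ConfigMap" line later confirms.
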
	I0916 10:35:57.507011 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:57.508273 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:57.512529 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:35:57.512557 1384589 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:35:57.532805 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:57.544603 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:35:57.544626 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:35:57.554769 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:57.597359 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.683748 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:35:57.683782 1384589 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:35:57.706763 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.708832 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:35:57.708864 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:35:57.733074 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:35:57.733107 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:35:57.767880 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:35:57.767908 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:35:57.780746 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:35:57.780786 1384589 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:35:57.807404 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.850707 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:35:57.850745 1384589 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:35:57.887607 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:35:57.887636 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:35:57.954841 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:57.954878 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:35:57.957894 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:35:57.957918 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:35:57.990850 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:57.990882 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:35:58.040155 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.040193 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:35:58.078005 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:35:58.078038 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:35:58.084259 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:35:58.084302 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:35:58.131227 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:35:58.131253 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:35:58.132161 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:58.147419 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:58.178615 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.199520 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:35:58.199553 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:35:58.206840 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:35:58.206873 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:35:58.251350 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:35:58.251378 1384589 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:35:58.301781 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:35:58.301809 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:35:58.328155 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:35:58.328184 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:35:58.351423 1384589 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.351449 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:35:58.404154 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:35:58.404188 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:35:58.467023 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.468235 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:35:58.468257 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:35:58.517809 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:35:58.517836 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:35:58.529132 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:35:58.529162 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:35:58.607318 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.607345 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:35:58.620217 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:35:58.620264 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:35:58.671546 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.726776 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:35:58.726803 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:35:58.855138 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:35:58.855204 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:35:58.993338 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:35:58.993375 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:35:59.149795 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:36:00.098293 1384589 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (2.77063538s)
	I0916 10:36:00.098468 1384589 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:36:00.098398 1384589 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (2.770933472s)
	I0916 10:36:00.099620 1384589 node_ready.go:35] waiting up to 6m0s for node "addons-936355" to be "Ready" ...
	I0916 10:36:00.683691 1384589 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-936355" context rescaled to 1 replicas
	I0916 10:36:02.134513 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:03.099256 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (5.592195221s)
	I0916 10:36:03.099297 1384589 addons.go:475] Verifying addon ingress=true in "addons-936355"
	I0916 10:36:03.099513 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (5.591216064s)
	I0916 10:36:03.099584 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (5.56674984s)
	I0916 10:36:03.099618 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (5.544827293s)
	I0916 10:36:03.099645 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (5.502263542s)
	I0916 10:36:03.099882 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.393096916s)
	I0916 10:36:03.099983 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (5.292558502s)
	I0916 10:36:03.100117 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (4.967882873s)
	I0916 10:36:03.100138 1384589 addons.go:475] Verifying addon registry=true in "addons-936355"
	I0916 10:36:03.100642 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (4.953181668s)
	I0916 10:36:03.100670 1384589 addons.go:475] Verifying addon metrics-server=true in "addons-936355"
	I0916 10:36:03.100733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (4.922089802s)
	I0916 10:36:03.102943 1384589 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-936355 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:36:03.102961 1384589 out.go:177] * Verifying registry addon...
	I0916 10:36:03.103034 1384589 out.go:177] * Verifying ingress addon...
	I0916 10:36:03.105813 1384589 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:36:03.106800 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:36:03.137676 1384589 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:36:03.137755 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.140614 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:03.140698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
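
The kapi.go lines here — and the long run of them below — are a poll loop: list pods by label selector, log the current phase, sleep, repeat until everything is Running or the timeout expires. A simplified equivalent that shells out to kubectl (assumed on PATH against a reachable cluster) instead of using client-go:

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
        "time"
    )

    // waitForPods polls until every pod matching the selector reports
    // phase Running -- a stripped-down take on the kapi.go wait loop.
    func waitForPods(ns, selector string, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            out, err := exec.Command("kubectl", "-n", ns, "get", "pods",
                "-l", selector, "-o", "jsonpath={.items[*].status.phase}").Output()
            if err == nil {
                phases := strings.Fields(string(out))
                ready := len(phases) > 0
                for _, p := range phases {
                    if p != "Running" {
                        ready = false
                    }
                }
                if ready {
                    return nil
                }
                fmt.Printf("waiting for pod %q, current state: %v\n", selector, phases)
            }
            time.Sleep(3 * time.Second)
        }
        return fmt.Errorf("timed out waiting for %q in %q", selector, ns)
    }

    func main() {
        if err := waitForPods("kube-system", "kubernetes.io/minikube-addons=registry", 6*time.Minute); err != nil {
            fmt.Println(err)
        }
    }
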
	W0916 10:36:03.153289 1384589 out.go:270] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class local-path as default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "local-path": the object has been modified; please apply your changes to the latest version and try again]
	I0916 10:36:03.250335 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (4.783269551s)
	W0916 10:36:03.250417 1384589 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250450 1384589 retry.go:31] will retry after 275.497637ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
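
The failure above is a classic ordering race: the VolumeSnapshotClass object is applied in the same kubectl batch as the CRD that defines it, and the API server has not finished registering the new kind. The log shows the recovery — a retry that succeeds with apply --force at 10:36:03.526121 and completes at 10:36:06.402733. One way to avoid the race outright is to apply the CRDs first, wait for them to be Established, then apply the custom resources; a sketch assuming kubectl on PATH, using the manifest paths from the log:

    package main

    import (
        "fmt"
        "os/exec"
    )

    // applyInOrder installs the snapshot CRDs, blocks until the API server
    // has registered the kinds, then applies the VolumeSnapshotClass.
    func applyInOrder() error {
        crds := []string{
            "/etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml",
            "/etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml",
            "/etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml",
        }
        for _, f := range crds {
            if out, err := exec.Command("kubectl", "apply", "-f", f).CombinedOutput(); err != nil {
                return fmt.Errorf("apply %s: %v\n%s", f, err, out)
            }
        }
        // Block until the new kinds are served.
        if out, err := exec.Command("kubectl", "wait", "--for=condition=Established",
            "crd/volumesnapshotclasses.snapshot.storage.k8s.io",
            "crd/volumesnapshotcontents.snapshot.storage.k8s.io",
            "crd/volumesnapshots.snapshot.storage.k8s.io",
            "--timeout=60s").CombinedOutput(); err != nil {
            return fmt.Errorf("wait: %v\n%s", err, out)
        }
        // Now the CR that depends on those CRDs can be applied safely.
        out, err := exec.Command("kubectl", "apply", "-f",
            "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml").CombinedOutput()
        if err != nil {
            return fmt.Errorf("apply snapshotclass: %v\n%s", err, out)
        }
        return nil
    }

    func main() {
        if err := applyInOrder(); err != nil {
            fmt.Println(err)
        }
    }
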
	I0916 10:36:03.250543 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (4.57892356s)
	I0916 10:36:03.461537 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (4.311696877s)
	I0916 10:36:03.461620 1384589 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:36:03.466201 1384589 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:36:03.469722 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:36:03.486422 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:03.486490 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:03.526121 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:36:03.615580 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:03.616763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.973974 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.110336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.111341 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.482735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.603445 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:04.611582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.612963 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.974584 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.112352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.113152 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.475349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.612975 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.617564 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.994295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.112783 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.113610 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.402733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.87656549s)
	I0916 10:36:06.474104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.604073 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:06.611947 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.613297 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.111053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.112244 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.247182 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:36:07.247343 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.269993 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.399328 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:36:07.424561 1384589 addons.go:234] Setting addon gcp-auth=true in "addons-936355"
	I0916 10:36:07.424615 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:36:07.425137 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:36:07.445430 1384589 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:36:07.445507 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.462936 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.473788 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.564092 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:36:07.566842 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:36:07.569433 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:36:07.569479 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:36:07.591162 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:36:07.591235 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:36:07.611011 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.612352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.614169 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.614230 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:36:07.634944 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.973644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.114938 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.115927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.289637 1384589 addons.go:475] Verifying addon gcp-auth=true in "addons-936355"
	I0916 10:36:08.292442 1384589 out.go:177] * Verifying gcp-auth addon...
	I0916 10:36:08.297073 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:36:08.311457 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:36:08.311536 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.473794 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.610857 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.611876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.801268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.973643 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.105583 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:09.110567 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.111022 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.300943 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.478291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.611071 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.612876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.801153 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.973766 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.118258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.119777 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.307205 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.473996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.611600 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.611698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.801229 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.974340 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.112014 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:11.116183 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.120476 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.301066 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.473420 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.610713 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.612423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.800270 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.973407 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.115791 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.116920 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.301411 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.473867 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.609770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.610662 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.801634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.973046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.110851 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.111134 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.300575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.473835 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.603219 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:13.610390 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.611574 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.801371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.973479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.112208 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.113533 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.300299 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.474139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.610046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.612561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.800653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.972848 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.110408 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.110932 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.300237 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.473707 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.603293 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:15.610246 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.611371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.800451 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.973710 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.110350 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.111259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.300830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.472823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.609912 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.610711 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.801005 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.973568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.110550 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.112172 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.301017 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.473847 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.603589 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:17.610593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.611441 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.800956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.974143 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.110263 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.111182 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.301212 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.610442 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.611436 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.800286 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.973687 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.110597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.111342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.301090 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.473269 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.609625 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.610850 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.800307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.974046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.103731 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:20.112214 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.113558 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.301265 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.473689 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.610324 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.611114 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.800597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.109533 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.111696 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.302328 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.473189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.610124 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.611262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.801275 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.973296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.111525 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.113002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.300321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.473211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.602936 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:22.610283 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.611107 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.800931 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.974004 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.109980 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.110973 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.301081 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.473035 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.610199 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.611296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.800268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.973666 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.109603 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.110778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.301295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.473680 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.609537 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.610685 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.800457 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.974147 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.103048 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:25.111012 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.111240 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.300767 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.473813 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.610908 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.611483 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.801271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.973399 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.109553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.111922 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.300892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.609476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.610465 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.800314 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.974947 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.104747 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:27.110466 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.113262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.302886 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.475127 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.610103 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.619742 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.801198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.974956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.115379 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.117659 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.300851 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.474546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.610341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.611106 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.800632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.973876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.109998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.111054 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.300629 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.473403 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.603802 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:29.610293 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.611053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.800316 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.975589 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.112209 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.112442 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.300936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.473757 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.610468 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.610927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.801173 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.974752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.111549 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.111768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.300752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.472954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.610456 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.611765 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.801083 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.103800 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:32.109737 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.111636 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.301104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.473774 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.610924 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.611190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.801482 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.974672 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.110188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.111271 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.301349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.473433 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.610409 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.610888 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.801627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.973881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.110134 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.110497 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.474295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.603135 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:34.610342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.611690 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.801258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.973555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.110766 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.111394 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.300970 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.473087 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.610115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.611008 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.800154 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.974082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.109881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.110992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.300326 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.473408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.604025 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:36.610440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.610869 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.801065 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.973323 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.109996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.111285 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.300895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.474211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.610044 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.610356 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.800660 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.110670 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.110901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.301861 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.473168 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.610218 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.611834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.800936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.975190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.103702 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:39.110476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.111170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.301227 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.473926 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.609710 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.611195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.800502 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.973582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.111455 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.111653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.300951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.473797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.610268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.611132 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.800770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.974250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.110735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.111970 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.300538 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.473964 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.603723 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:41.610292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.610627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.801470 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.974052 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.110959 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.112236 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.300960 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.473748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.612461 1384589 node_ready.go:49] node "addons-936355" has status "Ready":"True"
	I0916 10:36:42.612538 1384589 node_ready.go:38] duration metric: took 42.512890552s for node "addons-936355" to be "Ready" ...
	I0916 10:36:42.612563 1384589 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
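
At this point the node gate clears and the harness switches to per-pod readiness checks. For reference, a minimal client-go sketch of the kind of Ready-condition poll the node_ready.go lines above perform; the kubeconfig path, poll interval, and error handling are illustrative assumptions, not taken from minikube's source (the node name "addons-936355" is from the log):

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		// Assumption: a standard kubeconfig; minikube writes one per profile.
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)

		// Poll the node's Ready condition until it reports True or we time out.
		err = wait.PollImmediate(2*time.Second, 6*time.Minute, func() (bool, error) {
			node, err := client.CoreV1().Nodes().Get(context.TODO(), "addons-936355", metav1.GetOptions{})
			if err != nil {
				return false, nil // transient API errors: keep polling
			}
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady {
					fmt.Printf("node %q has status Ready=%s\n", node.Name, c.Status)
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
		if err != nil {
			panic(err)
		}
	}
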
	I0916 10:36:42.623341 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:42.623417 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.624231 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.627174 1384589 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:42.859763 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.978703 1384589 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:42.978731 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
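
The kapi.go:86/kapi.go:96 pairs above first list the pods matching an addon's label selector and then re-poll until every match is Ready. A hedged sketch of that pattern under the same assumptions as the previous snippet; the function name and output format are invented for illustration:

	// podsReadyForSelector mirrors the "Found N Pods for label selector" /
	// "waiting for pod" pair above: list by label, then require every pod Ready.
	func podsReadyForSelector(client kubernetes.Interface, ns, selector string) (bool, error) {
		pods, err := client.CoreV1().Pods(ns).List(context.TODO(), metav1.ListOptions{LabelSelector: selector})
		if err != nil {
			return false, err
		}
		fmt.Printf("Found %d Pods for label selector %s\n", len(pods.Items), selector)
		for _, p := range pods.Items {
			ready := false
			for _, c := range p.Status.Conditions {
				if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
					ready = true
				}
			}
			if !ready {
				fmt.Printf("waiting for pod %q, current state: %s\n", p.Name, p.Status.Phase)
				return false, nil
			}
		}
		return true, nil
	}
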
	I0916 10:36:43.131865 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.133687 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.349019 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.479093 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:43.612085 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.613250 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.838378 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.975549 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.112002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.113078 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.303567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.474708 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.612644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.614103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.633869 1384589 pod_ready.go:93] pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.633943 1384589 pod_ready.go:82] duration metric: took 2.006728044s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.633994 1384589 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642581 1384589 pod_ready.go:93] pod "etcd-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.642653 1384589 pod_ready.go:82] duration metric: took 8.633064ms for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642683 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650836 1384589 pod_ready.go:93] pod "kube-apiserver-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.650858 1384589 pod_ready.go:82] duration metric: took 8.155202ms for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650871 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656888 1384589 pod_ready.go:93] pod "kube-controller-manager-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.656911 1384589 pod_ready.go:82] duration metric: took 6.032453ms for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656925 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663172 1384589 pod_ready.go:93] pod "kube-proxy-6zqlq" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.663198 1384589 pod_ready.go:82] duration metric: took 6.264685ms for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663210 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.800889 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.975665 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.036535 1384589 pod_ready.go:93] pod "kube-scheduler-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:45.036565 1384589 pod_ready.go:82] duration metric: took 373.347727ms for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:45.036579 1384589 pod_ready.go:79] waiting up to 6m0s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
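
The pod_ready.go sequence above waits on each system-critical pod by name and logs a duration metric once its Ready condition flips to True. A minimal sketch of such a per-pod wait, assuming client-go's polling helper; waitPodReady is a hypothetical name, not minikube's API:

	// waitPodReady polls one named pod's Ready condition and reports how
	// long the wait took, in the style of the pod_ready.go lines above.
	func waitPodReady(client kubernetes.Interface, ns, name string, timeout time.Duration) error {
		start := time.Now()
		err := wait.PollImmediate(2*time.Second, timeout, func() (bool, error) {
			pod, err := client.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
			if err != nil {
				return false, nil // keep polling through transient errors
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
		if err != nil {
			return fmt.Errorf("pod %q in %q namespace never became Ready: %w", name, ns, err)
		}
		fmt.Printf("duration metric: took %s for pod %q to be Ready\n", time.Since(start), name)
		return nil
	}
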
	I0916 10:36:45.111493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.112631 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.308107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.474657 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.611914 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.612461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.801892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.974950 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.111683 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.114082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.301157 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.475128 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.611945 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.613048 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.801341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.974921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.044703 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:47.112165 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.114489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.301333 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.480727 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.612823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.613992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.802256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.975336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.114295 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.116308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.301669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.478171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.613077 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.615032 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.802520 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.974753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.045627 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:49.112778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.116258 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.301317 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.477632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.617030 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.618841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.801756 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.975098 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.112372 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.115428 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.303239 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.475866 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.610712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.613666 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.800849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.975104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.113376 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.116309 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.305523 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.476644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.547164 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:51.619471 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.620588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.803271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.978508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.112860 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.114242 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.475635 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.610961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.611563 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.802388 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.975192 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.112514 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.113242 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.301036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.475517 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.613316 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.614402 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.801348 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.977291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.050970 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:54.110981 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.112076 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.300546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.476454 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.610582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.612518 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.803551 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.111398 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.112761 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.474274 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.609938 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.612002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.800575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.974519 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.112644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.113614 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.301290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.476637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.543349 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:56.613159 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.614779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.801547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.975878 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.111646 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.114449 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.301068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.475345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.612454 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.613637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.802031 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.975475 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.112792 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.114331 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.301185 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.477806 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.543702 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:58.611292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.612924 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.801770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.978258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.111614 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.113277 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.300874 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.478857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.612769 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.614234 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.801191 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.975770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.124776 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.127598 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.312397 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.476593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.612256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.615086 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.801400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.975455 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.045782 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:01.116772 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.117862 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.300859 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.475607 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.614426 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.616901 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.806694 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.976923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.111895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.112248 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.301293 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.474913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.610544 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.611469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.801570 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.974546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.110553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.111258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.302951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.475760 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.542976 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:03.612478 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.614314 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.802588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.974619 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.116170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.117565 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.301282 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.474423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.609959 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.611546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.802714 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.974564 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.111189 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.119380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.301308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.480667 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.545296 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:05.613921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.620210 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.801887 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.979380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.117389 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.120937 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.301555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.475271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.612080 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.801421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.975493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.111399 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.114107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.300779 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.478877 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.558060 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:07.615155 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.616925 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.801853 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.975171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.110594 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.111215 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.300440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.476290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.611297 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.612374 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.801416 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.975287 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.110125 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.111958 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.304146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.474050 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.610553 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.611805 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.801358 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.974606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.045151 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:10.115132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.117029 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.300604 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.478567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.612321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.613469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.801386 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.979174 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.112568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.116046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.301477 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.475805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.613534 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.615206 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.802410 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.976748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.047271 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:12.112753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.114779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.300849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.479609 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.633512 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.635102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.801945 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.978658 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.111553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.113586 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.303385 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.479039 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.615554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.806654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.981409 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.060889 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:14.112654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.113844 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.301688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.474872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.610310 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.610746 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.800633 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.975036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.112998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.115460 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.300634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.474102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.613955 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.615489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.801741 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.975686 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.113469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.114978 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.301581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.475151 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.550481 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:16.614516 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.615278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.802546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.975189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.110944 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.111649 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.302100 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.475101 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.611759 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.612357 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.800825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.975226 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.110760 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.112805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.300370 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.474527 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.610984 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.611944 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.801132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.974591 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.046356 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:19.112245 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.115197 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.301744 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.475515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.610679 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.614216 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.801704 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.974949 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.111388 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.114141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.301219 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.474669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.611319 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.615110 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.801384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.976136 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.113352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.113988 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.489778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.545440 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:21.613554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.616634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.801820 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.977146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.111094 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.112217 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.301825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.475834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.611602 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.612556 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.805363 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.975337 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.112472 1384589 kapi.go:107] duration metric: took 1m20.005670496s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:37:23.113515 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.300925 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.474515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.610822 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.801408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.977906 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.044059 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:24.117487 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.301384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.476565 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.611373 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.801872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.984901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.111954 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.300421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.475126 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.611267 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.808830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.975068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.111025 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.310954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.475111 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.543709 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:26.609974 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.838995 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.975321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.110779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.301198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.476321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.610748 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.801486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.975547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.110763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.301469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.474991 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.610943 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.801350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.975749 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.046127 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:29.110966 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.305494 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.475929 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.609824 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.801492 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.977852 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.113447 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.301994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.476258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.610718 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.801712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.975400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.110916 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.300717 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.474547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.542764 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:31.612339 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.804045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.975617 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.110961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.300588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.482569 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.611127 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.804201 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.975368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.111355 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.301816 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:33.477518 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.551472 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:33.611027 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.801158 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.013405 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.127200 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.310368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.475923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.611219 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.801913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.978855 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.118452 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.300764 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.476873 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.611849 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.802246 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.975118 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.044866 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:36.111125 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.301167 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.477188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.617190 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.801375 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.974623 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.113798 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.301345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.479115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.611187 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.802141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.976103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.094708 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:38.116394 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.300966 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.474752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.610164 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.800561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.975817 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.110879 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.301972 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.475982 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.614550 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.801870 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.975576 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.112781 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:40.301195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.476921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.543014 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:40.612230 1384589 kapi.go:107] duration metric: took 1m37.506412903s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:37:40.800501 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.980528 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.301899 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.478479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.801278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.975045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.302225 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.487350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.548067 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:42.806839 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.976392 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.300621 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.475884 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.802919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.975139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.301415 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.475371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.801688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.975259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.062166 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:45.301957 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.477003 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.802892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.301112 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.475372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.800784 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.974857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.303524 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.475768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.545443 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:47.800473 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.974841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.301353 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.474781 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.800728 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.975372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.301044 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.475307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.801251 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.976296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.044755 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:50.306461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.478119 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.802508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.975919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.310303 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.475230 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.801606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.975318 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.053332 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:52.302006 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:52.476486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.801441 1384589 kapi.go:107] duration metric: took 1m44.50438368s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:37:52.803585 1384589 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-936355 cluster.
	I0916 10:37:52.805126 1384589 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:37:52.807003 1384589 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
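	[editor's note: the opt-out that this message describes is a pod-metadata label; in minikube's gcp-auth addon documentation it is written as `gcp-auth-skip-secret: "true"`. The exact expected value may vary by minikube version, so treat that spelling as illustrative.]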
	I0916 10:37:52.974797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.475581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.975250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.474446 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.542561 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:54.975714 1384589 kapi.go:107] duration metric: took 1m51.5059929s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:37:54.976913 1384589 out.go:177] * Enabled addons: nvidia-device-plugin, ingress-dns, cloud-spanner, storage-provisioner, metrics-server, yakd, default-storageclass, inspektor-gadget, volumesnapshots, registry, ingress, gcp-auth, csi-hostpath-driver
	I0916 10:37:54.977951 1384589 addons.go:510] duration metric: took 1m58.330681209s for enable addons: enabled=[nvidia-device-plugin ingress-dns cloud-spanner storage-provisioner metrics-server yakd default-storageclass inspektor-gadget volumesnapshots registry ingress gcp-auth csi-hostpath-driver]
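	[editor's note: up to this point the log is dominated by kapi.go:96 poll rounds: one list request per addon label selector, roughly every 500ms, until the matching pods leave Pending. The interleaved pod_ready.go:103 lines do the same for the metrics-server pod, but gate on the pod's Ready condition rather than its phase. Below is a minimal client-go sketch of that polling shape, assuming a kubeconfig at the default location; the function name, interval, and timeout are illustrative assumptions, not minikube's actual kapi.go code.]

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitForPodsRunning polls the API server until every pod matching selector
// in namespace ns reports phase Running, or the timeout elapses. Each round
// that finds a non-Running pod prints a line analogous to kapi.go:96 above.
func waitForPodsRunning(client kubernetes.Interface, ns, selector string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		pods, err := client.CoreV1().Pods(ns).List(context.TODO(), metav1.ListOptions{LabelSelector: selector})
		if err != nil {
			return err
		}
		notRunning := 0
		for _, p := range pods.Items {
			if p.Status.Phase != corev1.PodRunning {
				notRunning++
				fmt.Printf("waiting for pod %q, current state: %s\n", selector, p.Status.Phase)
			}
		}
		if len(pods.Items) > 0 && notRunning == 0 {
			return nil // all matching pods are Running
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("timed out after %s waiting for %s", timeout, selector)
		}
		time.Sleep(interval)
	}
}

func main() {
	// Build a client from ~/.kube/config (assumed location for this sketch).
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	client, err := kubernetes.NewForConfig(config)
	if err != nil {
		log.Fatal(err)
	}
	// Same selector the log above is waiting on.
	if err := waitForPodsRunning(client, "kube-system",
		"kubernetes.io/minikube-addons=csi-hostpath-driver",
		500*time.Millisecond, 6*time.Minute); err != nil {
		log.Fatal(err)
	}
}
```

	[Polling phase rather than holding a watch keeps the logic robust to apiserver restarts during bootstrap, at the cost of the per-round log chatter seen above.]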
	I0916 10:37:56.543286 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:58.543538 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:00.545466 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:03.044859 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:05.543384 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:08.044081 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:10.044862 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:12.543815 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:15.046388 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:17.044536 1384589 pod_ready.go:93] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.044563 1384589 pod_ready.go:82] duration metric: took 1m32.00797612s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.044576 1384589 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054621 1384589 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.054646 1384589 pod_ready.go:82] duration metric: took 10.061393ms for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054673 1384589 pod_ready.go:39] duration metric: took 1m34.442085136s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:38:17.054689 1384589 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:38:17.054724 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:17.054791 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:17.110909 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:17.110942 1384589 cri.go:89] found id: ""
	I0916 10:38:17.110950 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:17.111018 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.114542 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:17.114619 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:17.153834 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.153856 1384589 cri.go:89] found id: ""
	I0916 10:38:17.153864 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:17.153923 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.157470 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:17.157579 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:17.198133 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:17.198155 1384589 cri.go:89] found id: ""
	I0916 10:38:17.198163 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:17.198222 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.201699 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:17.201773 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:17.244177 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.244206 1384589 cri.go:89] found id: ""
	I0916 10:38:17.244215 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:17.244287 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.248238 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:17.248346 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:17.286359 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.286380 1384589 cri.go:89] found id: ""
	I0916 10:38:17.286388 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:17.286476 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.290475 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:17.290598 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:17.332786 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.332808 1384589 cri.go:89] found id: ""
	I0916 10:38:17.332817 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:17.332887 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.336545 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:17.336625 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:17.376900 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.376922 1384589 cri.go:89] found id: ""
	I0916 10:38:17.376930 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:17.376991 1384589 ssh_runner.go:195] Run: which crictl
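	[editor's note: from here (logs.go:123) the runner snapshots diagnostics for each control-plane component it just discovered: container IDs come from `crictl ps -a --quiet --name=<component>` and each ID is tailed with `crictl logs --tail 400 <id>`, exactly as the Run lines show. The sketch below is a local stand-in for that sequence; minikube executes these commands over its ssh_runner, and the helper function and loop here are illustrative, not minikube's code.]

```go
package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

// tailComponentLogs mirrors the discovery-then-tail sequence in the log:
// list all container IDs for a component, then fetch the last 400 log
// lines for each one.
func tailComponentLogs(component string) error {
	out, err := exec.Command("sudo", "crictl", "ps", "-a", "--quiet", "--name="+component).Output()
	if err != nil {
		return fmt.Errorf("listing %s containers: %w", component, err)
	}
	for _, id := range strings.Fields(string(out)) {
		logs, err := exec.Command("sudo", "crictl", "logs", "--tail", "400", id).CombinedOutput()
		if err != nil {
			return fmt.Errorf("logs for %s: %w", id, err)
		}
		fmt.Printf("=== %s [%s] ===\n%s", component, id, logs)
	}
	return nil
}

func main() {
	// The same components the runner enumerates above.
	for _, c := range []string{"kube-apiserver", "etcd", "coredns",
		"kube-scheduler", "kube-proxy", "kube-controller-manager", "kindnet"} {
		if err := tailComponentLogs(c); err != nil {
			log.Println(err)
		}
	}
}
```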
	I0916 10:38:17.380608 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:17.380639 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.430005 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:17.430059 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.478918 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:17.478953 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:17.578588 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:17.578626 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:17.596725 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:17.596755 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:17.780455 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:17.780482 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.832701 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:17.832737 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.873549 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:17.873579 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.944894 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:17.944933 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:18.006230 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:18.006286 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:18.071787 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072057 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.072239 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072456 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.075800 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.076027 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.087591 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.087896 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.088088 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.088320 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.128812 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:18.128841 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:18.186612 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:18.186644 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:18.233148 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233182 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:18.233388 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:18.233404 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233412 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233423 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233429 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233449 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.233461 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233470 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:28.234697 1384589 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:38:28.249266 1384589 api_server.go:72] duration metric: took 2m31.602198408s to wait for apiserver process to appear ...
	I0916 10:38:28.249292 1384589 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:38:28.249329 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:28.249401 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:28.291513 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:28.291538 1384589 cri.go:89] found id: ""
	I0916 10:38:28.291546 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:28.291605 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.295282 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:28.295362 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:28.334381 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.334460 1384589 cri.go:89] found id: ""
	I0916 10:38:28.334479 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:28.334596 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.338232 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:28.338315 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:28.386465 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.386495 1384589 cri.go:89] found id: ""
	I0916 10:38:28.386503 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:28.386564 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.390431 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:28.390508 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:28.428479 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:28.428500 1384589 cri.go:89] found id: ""
	I0916 10:38:28.428508 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:28.428568 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.431936 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:28.432009 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:28.480074 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.480148 1384589 cri.go:89] found id: ""
	I0916 10:38:28.480171 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:28.480257 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.484845 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:28.484948 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:28.526872 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:28.526896 1384589 cri.go:89] found id: ""
	I0916 10:38:28.526905 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:28.526965 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.530520 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:28.530607 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:28.569037 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.569065 1384589 cri.go:89] found id: ""
	I0916 10:38:28.569074 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:28.569150 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.572604 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:28.572634 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:28.589298 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:28.589323 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:28.729585 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:28.729703 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.802248 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:28.802300 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.843099 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:28.843130 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.886320 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:28.886350 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.930299 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:28.930374 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:29.041608 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:29.041656 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:29.079590 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.079841 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.080020 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.080236 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.083646 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.083870 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095503 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.095743 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095931 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.096162 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.147372 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:29.147401 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:29.214117 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:29.214148 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:29.266528 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:29.266562 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:29.339157 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:29.339193 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:29.402328 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402360 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:29.402421 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:29.402433 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402445 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402453 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402464 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402472 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.402483 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402490 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:39.403739 1384589 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:38:39.411467 1384589 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:38:39.412460 1384589 api_server.go:141] control plane version: v1.31.1
	I0916 10:38:39.412486 1384589 api_server.go:131] duration metric: took 11.16318566s to wait for apiserver health ...
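The healthz wait logged above is, at bottom, an HTTPS GET against https://192.168.49.2:8443/healthz repeated until the apiserver answers 200 with the body "ok". A minimal Go sketch of such a probe follows; the endpoint comes from the log, while the retry count, interval, and the skipped certificate verification are illustrative assumptions rather than what minikube's own client (which is configured from the kubeconfig) does.

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	// The apiserver serves a cluster-local certificate, so this throwaway
	// probe skips verification; a real client would trust the cluster CA.
	client := &http.Client{
		Timeout:   5 * time.Second,
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	for i := 0; i < 10; i++ {
		resp, err := client.Get("https://192.168.49.2:8443/healthz") // endpoint from the log
		if err == nil {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			fmt.Printf("healthz returned %d: %s\n", resp.StatusCode, body)
			if resp.StatusCode == http.StatusOK {
				return // matches the "returned 200: ok" lines in this log
			}
		}
		time.Sleep(2 * time.Second) // assumed cadence, not minikube's
	}
	fmt.Println("apiserver never became healthy")
}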
	I0916 10:38:39.412495 1384589 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:38:39.412517 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:39.412584 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:39.451224 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:39.451243 1384589 cri.go:89] found id: ""
	I0916 10:38:39.451251 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:39.451311 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.454893 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:39.454968 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:39.499416 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.499439 1384589 cri.go:89] found id: ""
	I0916 10:38:39.499448 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:39.499510 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.503122 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:39.503208 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:39.542014 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:39.542035 1384589 cri.go:89] found id: ""
	I0916 10:38:39.542043 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:39.542101 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.546062 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:39.546152 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:39.587808 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:39.587831 1384589 cri.go:89] found id: ""
	I0916 10:38:39.587842 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:39.587908 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.591371 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:39.591441 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:39.629404 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:39.629428 1384589 cri.go:89] found id: ""
	I0916 10:38:39.629437 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:39.629495 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.633014 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:39.633091 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:39.676945 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:39.676965 1384589 cri.go:89] found id: ""
	I0916 10:38:39.676973 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:39.677033 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.680612 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:39.680742 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:39.722262 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:39.722282 1384589 cri.go:89] found id: ""
	I0916 10:38:39.722291 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:39.722347 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.726091 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:39.726167 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:39.742632 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:39.742660 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.814109 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:39.814142 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:39.914270 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:39.914308 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:40.019354 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:40.019397 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:40.079304 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:40.079345 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:40.123482 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.123736 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.123917 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.124171 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.127515 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.127756 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139306 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139536 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139726 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139953 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.192100 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:40.192138 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:40.333078 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:40.333117 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:40.403526 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:40.403566 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:40.442653 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:40.442681 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:40.492601 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:40.492632 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:40.533326 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:40.533357 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:40.587619 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587653 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:40.587735 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:40.587753 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587783 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587793 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587808 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587820 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.587827 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587838 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:50.602620 1384589 system_pods.go:59] 18 kube-system pods found
	I0916 10:38:50.602695 1384589 system_pods.go:61] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.602715 1384589 system_pods.go:61] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.602720 1384589 system_pods.go:61] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.602728 1384589 system_pods.go:61] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.602736 1384589 system_pods.go:61] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.602745 1384589 system_pods.go:61] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.602749 1384589 system_pods.go:61] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.602753 1384589 system_pods.go:61] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.602762 1384589 system_pods.go:61] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.602767 1384589 system_pods.go:61] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.602771 1384589 system_pods.go:61] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.602775 1384589 system_pods.go:61] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.602794 1384589 system_pods.go:61] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.602798 1384589 system_pods.go:61] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.602813 1384589 system_pods.go:61] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.602821 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.602825 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.602832 1384589 system_pods.go:61] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.602848 1384589 system_pods.go:74] duration metric: took 11.190345697s to wait for pod list to return data ...
	I0916 10:38:50.602873 1384589 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:38:50.606360 1384589 default_sa.go:45] found service account: "default"
	I0916 10:38:50.606391 1384589 default_sa.go:55] duration metric: took 3.50956ms for default service account to be created ...
	I0916 10:38:50.606400 1384589 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:38:50.616619 1384589 system_pods.go:86] 18 kube-system pods found
	I0916 10:38:50.616661 1384589 system_pods.go:89] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.616668 1384589 system_pods.go:89] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.617624 1384589 system_pods.go:89] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.617646 1384589 system_pods.go:89] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.617652 1384589 system_pods.go:89] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.617662 1384589 system_pods.go:89] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.617668 1384589 system_pods.go:89] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.617673 1384589 system_pods.go:89] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.617677 1384589 system_pods.go:89] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.617682 1384589 system_pods.go:89] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.617686 1384589 system_pods.go:89] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.617691 1384589 system_pods.go:89] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.617696 1384589 system_pods.go:89] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.617701 1384589 system_pods.go:89] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.617705 1384589 system_pods.go:89] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.617716 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.617730 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.617734 1384589 system_pods.go:89] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.617742 1384589 system_pods.go:126] duration metric: took 11.335042ms to wait for k8s-apps to be running ...
	I0916 10:38:50.617754 1384589 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:38:50.617812 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:38:50.630041 1384589 system_svc.go:56] duration metric: took 12.276523ms WaitForService to wait for kubelet
	I0916 10:38:50.630069 1384589 kubeadm.go:582] duration metric: took 2m53.983006463s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:38:50.630088 1384589 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:38:50.633754 1384589 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:38:50.633790 1384589 node_conditions.go:123] node cpu capacity is 2
	I0916 10:38:50.633806 1384589 node_conditions.go:105] duration metric: took 3.708685ms to run NodePressure ...
	I0916 10:38:50.633819 1384589 start.go:241] waiting for startup goroutines ...
	I0916 10:38:50.633826 1384589 start.go:246] waiting for cluster config update ...
	I0916 10:38:50.633842 1384589 start.go:255] writing updated cluster config ...
	I0916 10:38:50.634158 1384589 ssh_runner.go:195] Run: rm -f paused
	I0916 10:38:50.643301 1384589 out.go:177] * Done! kubectl is now configured to use "addons-936355" cluster and "default" namespace by default
	E0916 10:38:50.646536 1384589 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
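The run finishes with the same failure signature that aborts the kubectl-driven assertions in this report: fork/exec /usr/local/bin/kubectl: exec format error. On this arm64 job that error is the kernel refusing to execute a binary built for a different architecture (typically an amd64 kubectl installed on an arm64 host); the cluster itself reports healthy throughout the log above. Below is a minimal sketch, assuming a Linux host, that reads the ELF header of the binary and compares its machine type with the host architecture; the path is copied from the failing log line, everything else is illustrative.

package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

// expected maps a GOARCH value to the ELF machine type a native binary
// should carry; only the two architectures relevant to this report.
var expected = map[string]elf.Machine{
	"amd64": elf.EM_X86_64,
	"arm64": elf.EM_AARCH64,
}

func main() {
	f, err := elf.Open("/usr/local/bin/kubectl") // path from the failing log line
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer f.Close()
	fmt.Printf("kubectl machine=%v host=%s\n", f.Machine, runtime.GOARCH)
	if want, ok := expected[runtime.GOARCH]; ok && f.Machine != want {
		fmt.Println("architecture mismatch: running it will fail with exec format error")
	}
}

On a CI worker, file /usr/local/bin/kubectl gives the same answer in one line; the point is that the exec format error here traces back to the installed binary, not to the cluster under test.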
	
	
	==> CRI-O <==
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.190458782Z" level=info msg="Pulled image: ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=aec7eb7a-34f5-4a54-8544-6d4473e524d8 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191172133Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191407713Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192237878Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192468518Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193197779Z" level=info msg="Creating container: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193291028Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257217560Z" level=info msg="Created container bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257933316Z" level=info msg="Starting container: bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66" id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.266608200Z" level=info msg="Started container" PID=6346 containerID=bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66 description=gadget/gadget-hx2qq/gadget id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer sandboxID=cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769
	Sep 16 10:42:42 addons-936355 conmon[6335]: conmon bcf51d70eaf49387d9ea <ninfo>: container 6346 exited with status 1
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.272327618Z" level=info msg="Removing container: d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.294374970Z" level=info msg="Removed container d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f: gadget/gadget-hx2qq/gadget" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:44:59 addons-936355 crio[961]: time="2024-09-16 10:44:59.034213271Z" level=info msg="Stopping container: b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410 (timeout: 30s)" id=f86b5341-2037-431a-9670-cd9962d4c777 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.223840830Z" level=info msg="Stopped container b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410: kube-system/metrics-server-84c5f94fbc-hngcs/metrics-server" id=f86b5341-2037-431a-9670-cd9962d4c777 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.224531667Z" level=info msg="Stopping pod sandbox: d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78" id=e702dc7c-e29a-4701-9354-361c77e73c12 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.224863613Z" level=info msg="Got pod network &{Name:metrics-server-84c5f94fbc-hngcs Namespace:kube-system ID:d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78 UID:5901d847-eeb7-4c71-97ba-d08734fb39ed NetNS:/var/run/netns/d869eb9f-bc5e-4d56-b86e-af8ebf6deab2 Networks:[{Name:kindnet Ifname:eth0}] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.225040930Z" level=info msg="Deleting pod kube-system_metrics-server-84c5f94fbc-hngcs from CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.353255654Z" level=info msg="Stopped pod sandbox: d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78" id=e702dc7c-e29a-4701-9354-361c77e73c12 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.635650185Z" level=info msg="Removing container: b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410" id=4673ad4c-4397-44f1-85fd-2477b808375c name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.700191485Z" level=info msg="Removed container b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410: kube-system/metrics-server-84c5f94fbc-hngcs/metrics-server" id=4673ad4c-4397-44f1-85fd-2477b808375c name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:45:01 addons-936355 crio[961]: time="2024-09-16 10:45:01.656395681Z" level=info msg="Stopping pod sandbox: cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769" id=0fa164d0-34b4-4e5e-8367-baa00bfe8fb3 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:01 addons-936355 crio[961]: time="2024-09-16 10:45:01.662253417Z" level=info msg="Stopped pod sandbox: cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769" id=0fa164d0-34b4-4e5e-8367-baa00bfe8fb3 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:02 addons-936355 crio[961]: time="2024-09-16 10:45:02.629574283Z" level=info msg="Removing container: bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66" id=30438bb0-08db-4bd8-8b24-61698cd20ee9 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:45:02 addons-936355 crio[961]: time="2024-09-16 10:45:02.671612648Z" level=info msg="Removed container bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66: gadget/gadget-hx2qq/gadget" id=30438bb0-08db-4bd8-8b24-61698cd20ee9 name=/runtime.v1.RuntimeService/RemoveContainer
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                                                        CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	3b30e9b80217f       registry.k8s.io/sig-storage/csi-snapshotter@sha256:291334908ddf71a4661fd7f6d9d97274de8a5378a2b6fdfeb2ce73414a34f82f                          7 minutes ago       Running             csi-snapshotter                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	66246ecfc47d6       gcr.io/k8s-minikube/gcp-auth-webhook@sha256:a40e1a121ee367d1712ac3a54ec9c38c405a65dde923c98e5fa6368fa82c4b69                                 7 minutes ago       Running             gcp-auth                                 0                   b09347ee3cb04       gcp-auth-89d5ffd79-j2ckg
	5dabae8faaade       registry.k8s.io/sig-storage/csi-provisioner@sha256:98ffd09c0784203d200e0f8c241501de31c8df79644caac7eed61bd6391e5d49                          7 minutes ago       Running             csi-provisioner                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	63d680209bdeb       registry.k8s.io/sig-storage/livenessprobe@sha256:8b00c6e8f52639ed9c6f866085893ab688e57879741b3089e3cfa9998502e158                            7 minutes ago       Running             liveness-probe                           0                   32259548d9366       csi-hostpathplugin-zrlmd
	b241211876358       registry.k8s.io/sig-storage/hostpathplugin@sha256:7b1dfc90a367222067fc468442fdf952e20fc5961f25c1ad654300ddc34d7083                           7 minutes ago       Running             hostpath                                 0                   32259548d9366       csi-hostpathplugin-zrlmd
	ab8eaedf8040a       registry.k8s.io/sig-storage/csi-node-driver-registrar@sha256:511b8c8ac828194a753909d26555ff08bc12f497dd8daeb83fe9d593693a26c1                7 minutes ago       Running             node-driver-registrar                    0                   32259548d9366       csi-hostpathplugin-zrlmd
	331ea01abf2ed       registry.k8s.io/ingress-nginx/controller@sha256:22f9d129ae8c89a2cabbd13af3c1668944f3dd68fec186199b7024a0a2fc75b3                             7 minutes ago       Running             controller                               0                   549ac22ef6389       ingress-nginx-controller-bc57996ff-jgfjf
	5e5f91a726842       docker.io/rancher/local-path-provisioner@sha256:689a2489a24e74426e4a4666e611c988202c5fa995908b0c60133aca3eb87d98                             7 minutes ago       Running             local-path-provisioner                   0                   0d353b19ef8b9       local-path-provisioner-86d989889c-b652d
	9773c25a0a3dc       gcr.io/cloud-spanner-emulator/emulator@sha256:41ec188288c7943f488600462b2b74002814e52439be82d15de33c3ee4898a58                               7 minutes ago       Running             cloud-spanner-emulator                   0                   32e89c2c5a56d       cloud-spanner-emulator-769b77f747-qvhhc
	3d28641a10686       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              patch                                    0                   ae4e1f0886d62       ingress-nginx-admission-patch-5hvnf
	98ee5c554b6be       registry.k8s.io/sig-storage/csi-external-health-monitor-controller@sha256:80b9ba94aa2afe24553d69bd165a6a51552d1582d68618ec00d3b804a7d9193c   7 minutes ago       Running             csi-external-health-monitor-controller   0                   32259548d9366       csi-hostpathplugin-zrlmd
	11f6f0bf554a7       registry.k8s.io/sig-storage/csi-resizer@sha256:425d8f1b769398127767b06ed97ce62578a3179bcb99809ce93a1649e025ffe7                              7 minutes ago       Running             csi-resizer                              0                   b35d742443216       csi-hostpath-resizer-0
	b9e189d1acd4c       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              create                                   0                   ef2a1639e8386       ingress-nginx-admission-create-kmjkm
	fe7a31fb7fe71       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   54dbfb69eabc4       snapshot-controller-56fcc65765-5th26
	082cee4b81438       docker.io/marcnuri/yakd@sha256:1c961556224d57fc747de0b1874524208e5fb4f8386f23e9c1c4c18e97109f17                                              7 minutes ago       Running             yakd                                     0                   91ccf72a05daa       yakd-dashboard-67d98fc6b-ztsj8
	4cf01aeaccd3f       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   29551751a8a3b       snapshot-controller-56fcc65765-fjrw9
	d5f8b279203cd       nvcr.io/nvidia/k8s-device-plugin@sha256:cdd05f9d89f0552478d46474005e86b98795ad364664f644225b99d94978e680                                     8 minutes ago       Running             nvidia-device-plugin-ctr                 0                   eda9663f4feb4       nvidia-device-plugin-daemonset-6j9gc
	d50b4977768d7       registry.k8s.io/sig-storage/csi-attacher@sha256:4b5609c78455de45821910065281a368d5f760b41250f90cbde5110543bdc326                             8 minutes ago       Running             csi-attacher                             0                   6f989f68a9599       csi-hostpath-attacher-0
	198a1da1f3633       gcr.io/k8s-minikube/minikube-ingress-dns@sha256:4211a1de532376c881851542238121b26792225faa36a7b02dccad88fd05797c                             8 minutes ago       Running             minikube-ingress-dns                     0                   69f4c5e690a85       kube-ingress-dns-minikube
	ee934dc9f4f92       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                                             8 minutes ago       Running             coredns                                  0                   d4b44085e648e       coredns-7c65d6cfc9-r6x6b
	2a862ef326432       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                                             8 minutes ago       Running             storage-provisioner                      0                   e168c388c9d11       storage-provisioner
	8d59e894feca0       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                                             9 minutes ago       Running             kindnet-cni                              0                   ca9fcc6465180       kindnet-wv5d6
	6200eb5cfcd24       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                                             9 minutes ago       Running             kube-proxy                               0                   a491da0967548       kube-proxy-6zqlq
	2b161087caf5a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                                             9 minutes ago       Running             kube-scheduler                           0                   c99e3a64f4ade       kube-scheduler-addons-936355
	4ee66eef50ab6       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                                             9 minutes ago       Running             kube-controller-manager                  0                   70ee024a23a5b       kube-controller-manager-addons-936355
	f911db1ed55bb       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                                                             9 minutes ago       Running             kube-apiserver                           0                   fe5dcd273af65       kube-apiserver-addons-936355
	3b247261f15f4       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                                             9 minutes ago       Running             etcd                                     0                   24ef782ab6be4       etcd-addons-936355
	
	
	==> coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] <==
	[INFO] 10.244.0.6:41410 - 64521 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000074082s
	[INFO] 10.244.0.6:32998 - 54705 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002300443s
	[INFO] 10.244.0.6:32998 - 29583 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002118081s
	[INFO] 10.244.0.6:57466 - 59415 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000171687s
	[INFO] 10.244.0.6:57466 - 26377 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000100124s
	[INFO] 10.244.0.6:57769 - 49607 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000092608s
	[INFO] 10.244.0.6:57769 - 14275 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.00007117s
	[INFO] 10.244.0.6:44055 - 7650 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000056876s
	[INFO] 10.244.0.6:44055 - 57820 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000036053s
	[INFO] 10.244.0.6:42734 - 36918 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000045283s
	[INFO] 10.244.0.6:42734 - 61736 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000033599s
	[INFO] 10.244.0.6:54338 - 26081 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001812112s
	[INFO] 10.244.0.6:54338 - 40423 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001769553s
	[INFO] 10.244.0.6:39094 - 56002 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000048811s
	[INFO] 10.244.0.6:39094 - 9935 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000036265s
	[INFO] 10.244.0.20:53754 - 1366 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.0016936s
	[INFO] 10.244.0.20:51144 - 45189 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.002085581s
	[INFO] 10.244.0.20:60186 - 495 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000131541s
	[INFO] 10.244.0.20:58173 - 47948 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000146031s
	[INFO] 10.244.0.20:41557 - 45319 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000125396s
	[INFO] 10.244.0.20:60168 - 27262 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00012213s
	[INFO] 10.244.0.20:55951 - 7020 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.004693068s
	[INFO] 10.244.0.20:46529 - 17954 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.005735124s
	[INFO] 10.244.0.20:54136 - 20848 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001853292s
	[INFO] 10.244.0.20:59146 - 51848 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.0025582s
	
	
	==> describe nodes <==
	Name:               addons-936355
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-936355
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-936355
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-936355
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-936355"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:35:49 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-936355
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:44:54 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:36:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-936355
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d04f59375248444681829ec487634926
	  System UUID:                65d15a11-4f3c-4207-941c-6a3b096d7c27
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (20 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-qvhhc     0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m4s
	  gcp-auth                    gcp-auth-89d5ffd79-j2ckg                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m56s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-jgfjf    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         9m2s
	  kube-system                 coredns-7c65d6cfc9-r6x6b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     9m6s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m1s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m1s
	  kube-system                 csi-hostpathplugin-zrlmd                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m22s
	  kube-system                 etcd-addons-936355                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         9m12s
	  kube-system                 kindnet-wv5d6                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      9m6s
	  kube-system                 kube-apiserver-addons-936355                250m (12%)    0 (0%)      0 (0%)           0 (0%)         9m13s
	  kube-system                 kube-controller-manager-addons-936355       200m (10%)    0 (0%)      0 (0%)           0 (0%)         9m12s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m3s
	  kube-system                 kube-proxy-6zqlq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m7s
	  kube-system                 kube-scheduler-addons-936355                100m (5%)     0 (0%)      0 (0%)           0 (0%)         9m12s
	  kube-system                 nvidia-device-plugin-daemonset-6j9gc        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m22s
	  kube-system                 snapshot-controller-56fcc65765-5th26        0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m1s
	  kube-system                 snapshot-controller-56fcc65765-fjrw9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m1s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m2s
	  local-path-storage          local-path-provisioner-86d989889c-b652d     0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m2s
	  yakd-dashboard              yakd-dashboard-67d98fc6b-ztsj8              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     9m2s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             438Mi (5%)  476Mi (6%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 9m1s                   kube-proxy       
	  Normal   NodeHasSufficientMemory  9m19s (x8 over 9m20s)  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m19s (x8 over 9m20s)  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m19s (x7 over 9m20s)  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   Starting                 9m13s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 9m13s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  9m12s                  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m12s                  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m12s                  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           9m8s                   node-controller  Node addons-936355 event: Registered Node addons-936355 in Controller
	  Normal   NodeReady                8m22s                  kubelet          Node addons-936355 status is now: NodeReady
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] <==
	{"level":"warn","ts":"2024-09-16T10:35:59.461521Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"173.42953ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/serviceaccounts\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:35:59.461548Z","caller":"traceutil/trace.go:171","msg":"trace[825824076] range","detail":"{range_begin:/registry/serviceaccounts; range_end:; response_count:0; response_revision:365; }","duration":"173.471351ms","start":"2024-09-16T10:35:59.288071Z","end":"2024-09-16T10:35:59.461542Z","steps":["trace[825824076] 'agreement among raft nodes before linearized reading'  (duration: 173.394077ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910299Z","caller":"traceutil/trace.go:171","msg":"trace[571848] transaction","detail":"{read_only:false; response_revision:372; number_of_response:1; }","duration":"101.485416ms","start":"2024-09-16T10:35:59.808786Z","end":"2024-09-16T10:35:59.910272Z","steps":["trace[571848] 'process raft request'  (duration: 72.962753ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910551Z","caller":"traceutil/trace.go:171","msg":"trace[2049811000] transaction","detail":"{read_only:false; response_revision:373; number_of_response:1; }","duration":"101.622964ms","start":"2024-09-16T10:35:59.808918Z","end":"2024-09-16T10:35:59.910541Z","steps":["trace[2049811000] 'process raft request'  (duration: 72.910972ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910806Z","caller":"traceutil/trace.go:171","msg":"trace[1901548869] transaction","detail":"{read_only:false; response_revision:374; number_of_response:1; }","duration":"101.844209ms","start":"2024-09-16T10:35:59.808954Z","end":"2024-09-16T10:35:59.910798Z","steps":["trace[1901548869] 'process raft request'  (duration: 72.897089ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945205Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.325816ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:35:59.945344Z","caller":"traceutil/trace.go:171","msg":"trace[1851060564] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:375; }","duration":"136.4823ms","start":"2024-09-16T10:35:59.808847Z","end":"2024-09-16T10:35:59.945330Z","steps":["trace[1851060564] 'agreement among raft nodes before linearized reading'  (duration: 136.289206ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945577Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.842881ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods/kube-system/kube-proxy-6zqlq\" ","response":"range_response_count:1 size:4833"}
	{"level":"info","ts":"2024-09-16T10:35:59.945682Z","caller":"traceutil/trace.go:171","msg":"trace[840636989] range","detail":"{range_begin:/registry/pods/kube-system/kube-proxy-6zqlq; range_end:; response_count:1; response_revision:375; }","duration":"136.945081ms","start":"2024-09-16T10:35:59.808725Z","end":"2024-09-16T10:35:59.945670Z","steps":["trace[840636989] 'agreement among raft nodes before linearized reading'  (duration: 136.808125ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.052890Z","caller":"traceutil/trace.go:171","msg":"trace[935433541] transaction","detail":"{read_only:false; response_revision:376; number_of_response:1; }","duration":"171.425064ms","start":"2024-09-16T10:35:59.881432Z","end":"2024-09-16T10:36:00.052857Z","steps":["trace[935433541] 'process raft request'  (duration: 103.374661ms)","trace[935433541] 'compare'  (duration: 67.370586ms)"],"step_count":2}
	{"level":"info","ts":"2024-09-16T10:36:00.053251Z","caller":"traceutil/trace.go:171","msg":"trace[1640692462] linearizableReadLoop","detail":"{readStateIndex:386; appliedIndex:385; }","duration":"171.181083ms","start":"2024-09-16T10:35:59.882059Z","end":"2024-09-16T10:36:00.053240Z","steps":["trace[1640692462] 'read index received'  (duration: 86.984477ms)","trace[1640692462] 'applied index is now lower than readState.Index'  (duration: 84.173345ms)"],"step_count":2}
	{"level":"warn","ts":"2024-09-16T10:36:00.082458Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"201.082081ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/specs/default/cloud-spanner-emulator\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.131166Z","caller":"traceutil/trace.go:171","msg":"trace[1850329919] range","detail":"{range_begin:/registry/services/specs/default/cloud-spanner-emulator; range_end:; response_count:0; response_revision:379; }","duration":"249.789246ms","start":"2024-09-16T10:35:59.881352Z","end":"2024-09-16T10:36:00.131141Z","steps":["trace[1850329919] 'agreement among raft nodes before linearized reading'  (duration: 201.06412ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.081165Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"179.917297ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/apiextensions.k8s.io/customresourcedefinitions\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.139019Z","caller":"traceutil/trace.go:171","msg":"trace[179530847] range","detail":"{range_begin:/registry/apiextensions.k8s.io/customresourcedefinitions; range_end:; response_count:0; response_revision:377; }","duration":"257.611311ms","start":"2024-09-16T10:35:59.881381Z","end":"2024-09-16T10:36:00.138992Z","steps":["trace[179530847] 'agreement among raft nodes before linearized reading'  (duration: 179.875904ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.497778Z","caller":"traceutil/trace.go:171","msg":"trace[781374587] transaction","detail":"{read_only:false; response_revision:383; number_of_response:1; }","duration":"244.927422ms","start":"2024-09-16T10:36:00.252822Z","end":"2024-09-16T10:36:00.497749Z","steps":["trace[781374587] 'process raft request'  (duration: 240.255139ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498033Z","caller":"traceutil/trace.go:171","msg":"trace[2049862755] transaction","detail":"{read_only:false; response_revision:384; number_of_response:1; }","duration":"245.149988ms","start":"2024-09-16T10:36:00.252873Z","end":"2024-09-16T10:36:00.498023Z","steps":["trace[2049862755] 'process raft request'  (duration: 243.936212ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498257Z","caller":"traceutil/trace.go:171","msg":"trace[1308392049] transaction","detail":"{read_only:false; response_revision:385; number_of_response:1; }","duration":"245.371382ms","start":"2024-09-16T10:36:00.252875Z","end":"2024-09-16T10:36:00.498247Z","steps":["trace[1308392049] 'process raft request'  (duration: 243.967662ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498461Z","caller":"traceutil/trace.go:171","msg":"trace[1919696831] transaction","detail":"{read_only:false; response_revision:386; number_of_response:1; }","duration":"245.376936ms","start":"2024-09-16T10:36:00.253076Z","end":"2024-09-16T10:36:00.498453Z","steps":["trace[1919696831] 'process raft request'  (duration: 243.813828ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.508772Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"105.013609ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/namespaces/kube-system\" ","response":"range_response_count:1 size:351"}
	{"level":"info","ts":"2024-09-16T10:36:00.508863Z","caller":"traceutil/trace.go:171","msg":"trace[1394685121] range","detail":"{range_begin:/registry/namespaces/kube-system; range_end:; response_count:1; response_revision:394; }","duration":"105.116096ms","start":"2024-09-16T10:36:00.403731Z","end":"2024-09-16T10:36:00.508847Z","steps":["trace[1394685121] 'agreement among raft nodes before linearized reading'  (duration: 104.925356ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.510822Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"106.978552ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/ranges/serviceips\" ","response":"range_response_count:1 size:116"}
	{"level":"info","ts":"2024-09-16T10:36:00.510873Z","caller":"traceutil/trace.go:171","msg":"trace[90254389] range","detail":"{range_begin:/registry/ranges/serviceips; range_end:; response_count:1; response_revision:394; }","duration":"107.038374ms","start":"2024-09-16T10:36:00.403822Z","end":"2024-09-16T10:36:00.510860Z","steps":["trace[90254389] 'agreement among raft nodes before linearized reading'  (duration: 106.927616ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.513542Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"109.80734ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:36:00.513613Z","caller":"traceutil/trace.go:171","msg":"trace[1039894144] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:395; }","duration":"109.886707ms","start":"2024-09-16T10:36:00.403712Z","end":"2024-09-16T10:36:00.513599Z","steps":["trace[1039894144] 'agreement among raft nodes before linearized reading'  (duration: 109.778304ms)"],"step_count":1}
	
	
	==> gcp-auth [66246ecfc47d65d522c45cff2baf15e2433dc0e0681c400a1437f7890b27b5b4] <==
	2024/09/16 10:37:52 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:45:04 up 10:27,  0 users,  load average: 0.77, 0.87, 1.65
	Linux addons-936355 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] <==
	I0916 10:43:02.017806       1 main.go:299] handling current node
	I0916 10:43:12.017271       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:12.017425       1 main.go:299] handling current node
	I0916 10:43:22.020176       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:22.020291       1 main.go:299] handling current node
	I0916 10:43:32.018190       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:32.018318       1 main.go:299] handling current node
	I0916 10:43:42.017224       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:42.017265       1 main.go:299] handling current node
	I0916 10:43:52.020877       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:52.021004       1 main.go:299] handling current node
	I0916 10:44:02.018165       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:02.018203       1 main.go:299] handling current node
	I0916 10:44:12.017210       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:12.017340       1 main.go:299] handling current node
	I0916 10:44:22.017201       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:22.017236       1 main.go:299] handling current node
	I0916 10:44:32.025659       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:32.025697       1 main.go:299] handling current node
	I0916 10:44:42.017259       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:42.017354       1 main.go:299] handling current node
	I0916 10:44:52.019299       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:52.019335       1 main.go:299] handling current node
	I0916 10:45:02.017505       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:45:02.017538       1 main.go:299] handling current node
	
	
	==> kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] <==
	E0916 10:38:17.001292       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: connect: connection refused" logger="UnhandledError"
	W0916 10:38:17.001510       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:17.001593       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	W0916 10:38:18.005200       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005260       1 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1beta1.metrics.k8s.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError"
	W0916 10:38:18.005310       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005375       1 controller.go:102] "Unhandled Error" err=<
		loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:18.006688       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:38:18.006752       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	W0916 10:38:22.012823       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:22.012831       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: i/o timeout" logger="UnhandledError"
	E0916 10:38:22.012998       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:22.050506       1 handler.go:286] Adding GroupVersion metrics.k8s.io v1beta1 to ResourceManager
	E0916 10:38:22.062342       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: Operation cannot be fulfilled on apiservices.apiregistration.k8s.io \"v1beta1.metrics.k8s.io\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:45:01.609832       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0916 10:45:02.742296       1 cacher.go:171] Terminating all watchers from cacher traces.gadget.kinvolk.io
	
	
	==> kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] <==
	I0916 10:37:43.572015       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.515757       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.524004       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.530235       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:45.150271       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:52.558234       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="22.364743ms"
	I0916 10:37:52.559132       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="31.351µs"
	I0916 10:37:54.619407       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:54.659736       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="16.445353ms"
	I0916 10:37:54.660858       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="68.076µs"
	E0916 10:37:56.423390       1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: stale GroupVersion discovery: metrics.k8s.io/v1beta1" logger="UnhandledError"
	I0916 10:37:56.899634       1 garbagecollector.go:826] "failed to discover some groups" logger="garbage-collector-controller" groups="<internal error: json: unsupported type: map[schema.GroupVersion]error>"
	I0916 10:37:58.024462       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:37:58.060468       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:38:14.019749       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:14.050322       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:16.992451       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="19.424064ms"
	I0916 10:38:16.993500       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="50.764µs"
	I0916 10:38:25.225399       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:39:05.149618       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.223µs"
	I0916 10:43:30.185523       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:44:59.000560       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="4.677µs"
	E0916 10:45:02.744102       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:45:04.219714       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:45:04.219759       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	
	
	==> kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] <==
	I0916 10:36:01.688812       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:36:02.265241       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:36:02.271591       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:36:02.423456       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:36:02.423579       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:36:02.431736       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:36:02.432160       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:36:02.432351       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:36:02.433544       1 config.go:199] "Starting service config controller"
	I0916 10:36:02.433620       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:36:02.433682       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:36:02.433713       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:36:02.434194       1 config.go:328] "Starting node config controller"
	I0916 10:36:02.434243       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:36:02.545223       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:36:02.585616       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:36:02.585634       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] <==
	W0916 10:35:50.291598       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:35:50.291652       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291738       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291810       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291911       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291966       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292090       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:35:50.292141       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292276       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292635       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292342       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:35:50.292669       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292396       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:35:50.292714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292436       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:35:50.292743       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292494       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:35:50.292771       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292533       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:35:50.292790       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292814       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292916       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:35:50.292984       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	I0916 10:35:51.479680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827817    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827850    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827886    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkfcs\" (UniqueName: \"kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827909    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827932    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827951    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828069    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run" (OuterVolumeSpecName: "run") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828119    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-cgroup" (OuterVolumeSpecName: "cgroup") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "cgroup". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828162    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs" (OuterVolumeSpecName: "debugfs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "debugfs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828180    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host" (OuterVolumeSpecName: "host") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.829456    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs" (OuterVolumeSpecName: "bpffs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "bpffs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.829526    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules" (OuterVolumeSpecName: "modules") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "modules". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.830432    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs" (OuterVolumeSpecName: "kube-api-access-lkfcs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "kube-api-access-lkfcs". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929150    1507 reconciler_common.go:288] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929195    1507 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-lkfcs\" (UniqueName: \"kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929207    1507 reconciler_common.go:288] "Volume detached for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929217    1507 reconciler_common.go:288] "Volume detached for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929229    1507 reconciler_common.go:288] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929238    1507 reconciler_common.go:288] "Volume detached for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-cgroup\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929246    1507 reconciler_common.go:288] "Volume detached for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.953650    1507 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5901d847-eeb7-4c71-97ba-d08734fb39ed" path="/var/lib/kubelet/pods/5901d847-eeb7-4c71-97ba-d08734fb39ed/volumes"
	Sep 16 10:45:02 addons-936355 kubelet[1507]: E0916 10:45:02.213574    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483502213290359,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:45:02 addons-936355 kubelet[1507]: E0916 10:45:02.213614    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483502213290359,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:45:02 addons-936355 kubelet[1507]: I0916 10:45:02.625963    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:45:03 addons-936355 kubelet[1507]: I0916 10:45:03.952357    1507 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1" path="/var/lib/kubelet/pods/fb6217d4-dbed-40c2-b47e-4342cb3f94b1/volumes"
	
	
	==> storage-provisioner [2a862ef326432a5d0293f9317e2a22cc3bbc0e787dab4595749d403d11fd2627] <==
	I0916 10:36:43.471506       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:36:43.494873       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:36:43.495065       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:36:43.512818       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:36:43.513129       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	I0916 10:36:43.520230       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"89d79315-71d3-40c0-aeb5-687aa54390d8", APIVersion:"v1", ResourceVersion:"938", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8 became leader
	I0916 10:36:43.613923       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-936355 -n addons-936355
helpers_test.go:261: (dbg) Run:  kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (415.555µs)
helpers_test.go:263: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/Ingress (2.54s)
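Note on the recurring failure mode: "fork/exec /usr/local/bin/kubectl: exec format error" is the kernel refusing to execute a binary whose format or architecture it cannot run; on this arm64 runner it most likely means an amd64 kubectl build was installed at /usr/local/bin/kubectl. A minimal check, assuming a POSIX shell on the runner and that the file(1) utility is available:

	uname -m                      # expected on this runner: aarch64
	file /usr/local/bin/kubectl   # "x86-64" in the output would confirm an architecture mismatch

The same root cause would explain every kubectl invocation in this report failing in well under a millisecond, as in the timings below.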

                                                
                                    
TestAddons/parallel/MetricsServer (354.78s)

=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:409: metrics-server stabilized in 2.57777ms
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...
helpers_test.go:344: "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 6.003330176s
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (425.843µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (419.796µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (465.949µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (341.07µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (334.826µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (503.174µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (485.78µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (348.364µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (543.97µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (3.291803ms)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (432.801µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-936355 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-936355 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (561.512µs)
addons_test.go:431: failed checking metric server: fork/exec /usr/local/bin/kubectl: exec format error
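
Note that all twelve retries fail in well under a millisecond, i.e. before any request could reach the cluster, so metrics-server itself (Running and healthy above) is not the problem; the client binary never starts. One way to confirm the suspected architecture mismatch, sketched with Go's standard debug/elf (the path comes from the error text; the root cause is inferred, not proven by the log):

package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

func main() {
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		// A non-ELF file (e.g. a truncated download) would also raise ENOEXEC.
		panic(err)
	}
	defer f.Close()
	// On an arm64 host, anything other than EM_AARCH64 cannot execute natively.
	fmt.Printf("binary machine: %v, host arch: %s\n", f.Machine, runtime.GOARCH)
}
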
addons_test.go:434: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 addons disable metrics-server --alsologtostderr -v=1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/MetricsServer]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-936355
helpers_test.go:235: (dbg) docker inspect addons-936355:

-- stdout --
	[
	    {
	        "Id": "990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22",
	        "Created": "2024-09-16T10:35:26.829229764Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1385081,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:35:26.979651686Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hostname",
	        "HostsPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hosts",
	        "LogPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22-json.log",
	        "Name": "/addons-936355",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-936355:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-936355",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/merged",
	                "UpperDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/diff",
	                "WorkDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-936355",
	                "Source": "/var/lib/docker/volumes/addons-936355/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-936355",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-936355",
	                "name.minikube.sigs.k8s.io": "addons-936355",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "c25ad70fe630d4f698b2829da4e56bff2645b3ff549ca5302800a382e6bdd028",
	            "SandboxKey": "/var/run/docker/netns/c25ad70fe630",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34603"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34604"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34607"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34605"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34606"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-936355": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "5d73edaa3366fd0ba0b4bacad454985b0bd272fda9938fc527483e0046d7c748",
	                    "EndpointID": "cf4cd538acb5e979612a79c60d294fba1f05c9fef1a1bec978977fcb945819c4",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-936355",
	                        "990f1d352091"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
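
One detail worth pulling out of the inspect dump: HostConfig.PortBindings publishes every guest port on 127.0.0.1 with an empty HostPort, which tells Docker to pick ephemeral host ports, and the assigned values (34603-34607 in this run) appear only under NetworkSettings.Ports. That is why ports are always resolved through an inspect template rather than hard-coded; a standalone sketch using the same template string minikube's cli_runner passes in the log further down:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Template copied verbatim from the cli_runner invocations in the log.
	format := `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
	out, err := exec.Command("docker", "container", "inspect", "-f", format, "addons-936355").Output()
	if err != nil {
		panic(err)
	}
	fmt.Println("ssh reachable at 127.0.0.1:" + strings.TrimSpace(string(out))) // 34603 in this run
}
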
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-936355 -n addons-936355
helpers_test.go:244: <<< TestAddons/parallel/MetricsServer FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/MetricsServer]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 logs -n 25: (2.485028085s)
helpers_test.go:252: TestAddons/parallel/MetricsServer logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-084128              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| start   | -o=json --download-only              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-605096              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | download-docker-880503               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p download-docker-880503            | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | binary-mirror-652159                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:40363               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-652159              | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| addons  | enable dashboard -p                  | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| start   | -p addons-936355 --wait=true         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:38 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| ip      | addons-936355 ip                     | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	| addons  | addons-936355 addons disable         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | addons-936355 addons                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:44 UTC | 16 Sep 24 10:44 UTC |
	|         | disable metrics-server               |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:35:01
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:35:01.861741 1384589 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:35:01.861923 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.861959 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:35:01.861972 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.862230 1384589 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:35:01.862730 1384589 out.go:352] Setting JSON to false
	I0916 10:35:01.863665 1384589 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37047,"bootTime":1726445855,"procs":155,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:35:01.863739 1384589 start.go:139] virtualization:  
	I0916 10:35:01.866923 1384589 out.go:177] * [addons-936355] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:35:01.870432 1384589 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:35:01.870537 1384589 notify.go:220] Checking for updates...
	I0916 10:35:01.875880 1384589 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:35:01.878650 1384589 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:01.881242 1384589 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:35:01.883862 1384589 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:35:01.886520 1384589 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:35:01.889353 1384589 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:35:01.930300 1384589 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:35:01.930438 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:01.986400 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:01.976217774 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:01.986524 1384589 docker.go:318] overlay module found
	I0916 10:35:01.989262 1384589 out.go:177] * Using the docker driver based on user configuration
	I0916 10:35:01.991996 1384589 start.go:297] selected driver: docker
	I0916 10:35:01.992025 1384589 start.go:901] validating driver "docker" against <nil>
	I0916 10:35:01.992040 1384589 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:35:01.992727 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:02.058953 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:02.049617339 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:02.059182 1384589 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:35:02.059420 1384589 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:35:02.062017 1384589 out.go:177] * Using Docker driver with root privileges
	I0916 10:35:02.064628 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:02.064789 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:02.064804 1384589 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:35:02.064885 1384589 start.go:340] cluster config:
	{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:02.069567 1384589 out.go:177] * Starting "addons-936355" primary control-plane node in "addons-936355" cluster
	I0916 10:35:02.072130 1384589 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:35:02.074827 1384589 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:35:02.077314 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:02.077371 1384589 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:35:02.077383 1384589 cache.go:56] Caching tarball of preloaded images
	I0916 10:35:02.077398 1384589 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:35:02.077476 1384589 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:35:02.077486 1384589 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:35:02.077848 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:02.077880 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json: {Name:mkd05c2b0dbaa1cc700db22c74ae8fbcc0c53329 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:02.092106 1384589 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:35:02.092232 1384589 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:35:02.092252 1384589 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:35:02.092257 1384589 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:35:02.092264 1384589 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:35:02.092269 1384589 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:35:19.265886 1384589 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:35:19.265926 1384589 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:35:19.265955 1384589 start.go:360] acquireMachinesLock for addons-936355: {Name:mk780e867f4084d469fbad7a4968b7ad3d556c69 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:35:19.266489 1384589 start.go:364] duration metric: took 511.962µs to acquireMachinesLock for "addons-936355"
	I0916 10:35:19.266531 1384589 start.go:93] Provisioning new machine with config: &{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
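
The machines lock above is configured with Delay:500ms and Timeout:10m0s; acquisition took ~512µs here only because no other process held it. A generic sketch of that acquire-with-retry pattern around an atomic file create (an illustration of the shape only, not minikube's actual lock package; the lock path is made up):

package main

import (
	"fmt"
	"os"
	"time"
)

// tryLock retries an atomic O_CREATE|O_EXCL create every delay until timeout
// and returns an unlock func on success.
func tryLock(path string, delay, timeout time.Duration) (func(), error) {
	deadline := time.Now().Add(timeout)
	for {
		f, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o600)
		if err == nil {
			f.Close()
			return func() { os.Remove(path) }, nil
		}
		if time.Now().After(deadline) {
			return nil, fmt.Errorf("timed out acquiring %s: %w", path, err)
		}
		time.Sleep(delay)
	}
}

func main() {
	unlock, err := tryLock("/tmp/minikube-machines.lock", 500*time.Millisecond, 10*time.Minute)
	if err != nil {
		panic(err)
	}
	defer unlock()
	fmt.Println("lock held; machine provisioning would run here")
}
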
	I0916 10:35:19.266610 1384589 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:35:19.269716 1384589 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:35:19.269968 1384589 start.go:159] libmachine.API.Create for "addons-936355" (driver="docker")
	I0916 10:35:19.270003 1384589 client.go:168] LocalClient.Create starting
	I0916 10:35:19.270125 1384589 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:35:20.065665 1384589 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:35:20.505791 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:35:20.520423 1384589 cli_runner.go:211] docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:35:20.520525 1384589 network_create.go:284] running [docker network inspect addons-936355] to gather additional debugging logs...
	I0916 10:35:20.520546 1384589 cli_runner.go:164] Run: docker network inspect addons-936355
	W0916 10:35:20.534395 1384589 cli_runner.go:211] docker network inspect addons-936355 returned with exit code 1
	I0916 10:35:20.534432 1384589 network_create.go:287] error running [docker network inspect addons-936355]: docker network inspect addons-936355: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-936355 not found
	I0916 10:35:20.534447 1384589 network_create.go:289] output of [docker network inspect addons-936355]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-936355 not found
	
	** /stderr **
	I0916 10:35:20.534555 1384589 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:20.550802 1384589 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001826a70}
	I0916 10:35:20.550849 1384589 network_create.go:124] attempt to create docker network addons-936355 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:35:20.550909 1384589 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-936355 addons-936355
	I0916 10:35:20.622324 1384589 network_create.go:108] docker network addons-936355 192.168.49.0/24 created
	I0916 10:35:20.622359 1384589 kic.go:121] calculated static IP "192.168.49.2" for the "addons-936355" container
	I0916 10:35:20.622443 1384589 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:35:20.636891 1384589 cli_runner.go:164] Run: docker volume create addons-936355 --label name.minikube.sigs.k8s.io=addons-936355 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:35:20.653249 1384589 oci.go:103] Successfully created a docker volume addons-936355
	I0916 10:35:20.653357 1384589 cli_runner.go:164] Run: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:35:22.737442 1384589 cli_runner.go:217] Completed: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (2.08404207s)
	I0916 10:35:22.737471 1384589 oci.go:107] Successfully prepared a docker volume addons-936355
	I0916 10:35:22.737499 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:22.737519 1384589 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:35:22.737588 1384589 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:35:26.763089 1384589 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.025452617s)
	I0916 10:35:26.763126 1384589 kic.go:203] duration metric: took 4.025604753s to extract preloaded images to volume ...
	W0916 10:35:26.763258 1384589 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:35:26.763378 1384589 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:35:26.814712 1384589 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-936355 --name addons-936355 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-936355 --network addons-936355 --ip 192.168.49.2 --volume addons-936355:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:35:27.165000 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Running}}
	I0916 10:35:27.189076 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:27.216370 1384589 cli_runner.go:164] Run: docker exec addons-936355 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:35:27.281467 1384589 oci.go:144] the created container "addons-936355" has a running status.
	I0916 10:35:27.281502 1384589 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa...
	I0916 10:35:28.804386 1384589 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:35:28.826599 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.843564 1384589 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:35:28.843591 1384589 kic_runner.go:114] Args: [docker exec --privileged addons-936355 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:35:28.892577 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.913158 1384589 machine.go:93] provisionDockerMachine start ...
	I0916 10:35:28.913258 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:28.931596 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:28.931893 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:28.931910 1384589 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:35:29.068030 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.068064 1384589 ubuntu.go:169] provisioning hostname "addons-936355"
	I0916 10:35:29.068142 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.085139 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.085383 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.085399 1384589 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-936355 && echo "addons-936355" | sudo tee /etc/hostname
	I0916 10:35:29.232508 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.232589 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.248944 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.249190 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.249214 1384589 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-936355' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-936355/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-936355' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:35:29.385206 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:35:29.385233 1384589 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:35:29.385263 1384589 ubuntu.go:177] setting up certificates
	I0916 10:35:29.385275 1384589 provision.go:84] configureAuth start
	I0916 10:35:29.385357 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:29.401844 1384589 provision.go:143] copyHostCerts
	I0916 10:35:29.401930 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:35:29.402060 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:35:29.402129 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:35:29.402184 1384589 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.addons-936355 san=[127.0.0.1 192.168.49.2 addons-936355 localhost minikube]
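
The server certificate minted here carries a SAN for every name the API server can be reached by: 127.0.0.1, the container's static IP 192.168.49.2, the profile name, localhost, and minikube. An illustrative crypto/x509 sketch issuing a certificate with exactly those SANs (not minikube's code; the real flow loads the CA from the ca.pem/ca-key.pem files named above instead of generating one):

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func must[T any](v T, err error) T {
	if err != nil {
		panic(err)
	}
	return v
}

func main() {
	// Throwaway CA, standing in for ca.pem/ca-key.pem.
	caKey := must(rsa.GenerateKey(rand.Reader, 2048))
	caTmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().Add(26280 * time.Hour), // matches CertExpiration in the config above
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign,
		BasicConstraintsValid: true,
	}
	caDER := must(x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey))
	caCert := must(x509.ParseCertificate(caDER))

	// Server cert with the SANs from the log line above.
	srvKey := must(rsa.GenerateKey(rand.Reader, 2048))
	srvTmpl := &x509.Certificate{
		SerialNumber: big.NewInt(2),
		Subject:      pkix.Name{Organization: []string{"jenkins.addons-936355"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.49.2")},
		DNSNames:     []string{"addons-936355", "localhost", "minikube"},
	}
	srvDER := must(x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey))
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: srvDER})
}
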
	I0916 10:35:29.844064 1384589 provision.go:177] copyRemoteCerts
	I0916 10:35:29.844139 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:35:29.844181 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.860341 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:29.957424 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:35:29.982494 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:35:30.020527 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:35:30.083993 1384589 provision.go:87] duration metric: took 698.682489ms to configureAuth
	I0916 10:35:30.084118 1384589 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:35:30.084480 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:30.084746 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.108015 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:30.108273 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:30.108291 1384589 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:35:30.350713 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:35:30.350736 1384589 machine.go:96] duration metric: took 1.437556677s to provisionDockerMachine
	I0916 10:35:30.350754 1384589 client.go:171] duration metric: took 11.080732872s to LocalClient.Create
	I0916 10:35:30.350775 1384589 start.go:167] duration metric: took 11.080807939s to libmachine.API.Create "addons-936355"
	I0916 10:35:30.350784 1384589 start.go:293] postStartSetup for "addons-936355" (driver="docker")
	I0916 10:35:30.350795 1384589 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:35:30.350871 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:35:30.350928 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.367694 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.471627 1384589 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:35:30.475048 1384589 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:35:30.475083 1384589 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:35:30.475094 1384589 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:35:30.475101 1384589 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:35:30.475111 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:35:30.475191 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:35:30.475215 1384589 start.go:296] duration metric: took 124.425275ms for postStartSetup
	I0916 10:35:30.475537 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.492884 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:30.493230 1384589 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:35:30.493280 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.510291 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.601939 1384589 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:35:30.606785 1384589 start.go:128] duration metric: took 11.340152497s to createHost
	I0916 10:35:30.606809 1384589 start.go:83] releasing machines lock for "addons-936355", held for 11.340303023s
	I0916 10:35:30.606879 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.623200 1384589 ssh_runner.go:195] Run: cat /version.json
	I0916 10:35:30.623223 1384589 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:35:30.623263 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.623284 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.644076 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.644213 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.736416 1384589 ssh_runner.go:195] Run: systemctl --version
	I0916 10:35:30.866086 1384589 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:35:31.012168 1384589 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:35:31.016985 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.040299 1384589 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:35:31.040383 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.079331 1384589 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:35:31.079357 1384589 start.go:495] detecting cgroup driver to use...
	I0916 10:35:31.079391 1384589 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:35:31.079448 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:35:31.097860 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:35:31.111311 1384589 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:35:31.111396 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:35:31.126864 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:35:31.142983 1384589 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:35:31.237602 1384589 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:35:31.329055 1384589 docker.go:233] disabling docker service ...
	I0916 10:35:31.329150 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:35:31.350134 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:35:31.362931 1384589 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:35:31.458212 1384589 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:35:31.563725 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:35:31.575461 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:35:31.592172 1384589 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:35:31.592265 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.602336 1384589 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:35:31.602418 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.612396 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.622391 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.632203 1384589 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:35:31.642063 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.651889 1384589 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.669408 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
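
Taken together, the sed/grep edits above would leave a drop-in roughly like the following; the section headers are assumed from CRI-O's stock config layout and are not shown in the log:

    # /etc/crio/crio.conf.d/02-crio.conf (reconstructed sketch)
    [crio.image]
    pause_image = "registry.k8s.io/pause:3.10"

    [crio.runtime]
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
      "net.ipv4.ip_unprivileged_port_start=0",
    ]
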
	I0916 10:35:31.683307 1384589 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:35:31.692220 1384589 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:35:31.702005 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:31.781982 1384589 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:35:31.897438 1384589 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:35:31.897567 1384589 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:35:31.901379 1384589 start.go:563] Will wait 60s for crictl version
	I0916 10:35:31.901491 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:35:31.904735 1384589 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:35:31.941675 1384589 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
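
crictl obtains those four fields with a Version RPC against the CRI socket. A sketch of the same query done directly in Go, assuming the k8s.io/cri-api and google.golang.org/grpc modules:

    package main

    import (
    	"context"
    	"fmt"

    	"google.golang.org/grpc"
    	"google.golang.org/grpc/credentials/insecure"
    	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
    )

    func main() {
    	// Same socket minikube waits on above.
    	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
    		grpc.WithTransportCredentials(insecure.NewCredentials()))
    	if err != nil {
    		panic(err)
    	}
    	defer conn.Close()
    	resp, err := runtimeapi.NewRuntimeServiceClient(conn).
    		Version(context.Background(), &runtimeapi.VersionRequest{})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("Version: %s RuntimeName: %s RuntimeVersion: %s RuntimeApiVersion: %s\n",
    		resp.Version, resp.RuntimeName, resp.RuntimeVersion, resp.RuntimeApiVersion)
    }
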
	I0916 10:35:31.941854 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:31.981298 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:32.027709 1384589 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:35:32.030371 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:32.045684 1384589 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:35:32.049353 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
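
That one-liner drops any stale host.minikube.internal entry, appends the fresh mapping, and copies the temp file back with sudo (a plain > redirect would not inherit sudo's privileges). The same read-filter-append-write cycle as a Go sketch; minikube itself shells out exactly as logged:

    package main

    import (
    	"fmt"
    	"os"
    	"strings"
    )

    // updateHosts rewrites hostsPath so exactly one line maps name to ip,
    // mirroring the grep/echo/cp pipeline in the log above.
    func updateHosts(hostsPath, ip, name string) error {
    	data, err := os.ReadFile(hostsPath)
    	if err != nil {
    		return err
    	}
    	var kept []string
    	for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
    		if !strings.HasSuffix(line, "\t"+name) { // drop the stale entry
    			kept = append(kept, line)
    		}
    	}
    	kept = append(kept, ip+"\t"+name)
    	return os.WriteFile(hostsPath, []byte(strings.Join(kept, "\n")+"\n"), 0644)
    }

    func main() {
    	if err := updateHosts("/etc/hosts", "192.168.49.1", "host.minikube.internal"); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    	}
    }
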
	I0916 10:35:32.060434 1384589 kubeadm.go:883] updating cluster {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:35:32.060562 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:32.060622 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.132274 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.132300 1384589 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:35:32.132361 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.168136 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.168159 1384589 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:35:32.168167 1384589 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:35:32.168274 1384589 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=addons-936355 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:35:32.168366 1384589 ssh_runner.go:195] Run: crio config
	I0916 10:35:32.227191 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:32.227213 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:32.227223 1384589 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:35:32.227267 1384589 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-936355 NodeName:addons-936355 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:35:32.227445 1384589 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "addons-936355"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:35:32.227523 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:35:32.236628 1384589 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:35:32.236739 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:35:32.245582 1384589 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:35:32.264058 1384589 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:35:32.283541 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2151 bytes)
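
The kubeadm.yaml.new just written is presumably rendered from the kubeadm options struct logged earlier via Go's text/template, the usual shape of this step; a trimmed, hypothetical version of that pattern (the real minikube template carries every field in the dump above):

    package main

    import (
    	"os"
    	"text/template"
    )

    // Hypothetical cut-down template; only a few of the logged fields.
    const kubeadmTmpl = `apiVersion: kubeadm.k8s.io/v1beta3
    kind: InitConfiguration
    localAPIEndpoint:
      advertiseAddress: {{.AdvertiseAddress}}
      bindPort: {{.APIServerPort}}
    nodeRegistration:
      criSocket: unix://{{.CRISocket}}
      name: "{{.NodeName}}"
    `

    type opts struct {
    	AdvertiseAddress string
    	APIServerPort    int
    	CRISocket        string
    	NodeName         string
    }

    func main() {
    	t := template.Must(template.New("kubeadm").Parse(kubeadmTmpl))
    	// Values taken from the kubeadm options logged above.
    	t.Execute(os.Stdout, opts{
    		AdvertiseAddress: "192.168.49.2",
    		APIServerPort:    8443,
    		CRISocket:        "/var/run/crio/crio.sock",
    		NodeName:         "addons-936355",
    	})
    }
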
	I0916 10:35:32.302607 1384589 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:35:32.306351 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.317408 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:32.409376 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:32.423337 1384589 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355 for IP: 192.168.49.2
	I0916 10:35:32.423401 1384589 certs.go:194] generating shared ca certs ...
	I0916 10:35:32.423434 1384589 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:32.423586 1384589 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:35:34.185450 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt ...
	I0916 10:35:34.185484 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt: {Name:mk7933e16cdd72038659b0287d05eb0c475b810e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.185680 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key ...
	I0916 10:35:34.185693 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key: {Name:mkb7482a30b71122d1b4fb2bf43b1e757c702edc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.186220 1384589 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:35:34.459909 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt ...
	I0916 10:35:34.459947 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt: {Name:mke012c32e9f14a06899ff2aaaf49a35a27f11b6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460629 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key ...
	I0916 10:35:34.460645 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key: {Name:mk5d1994088ad6012c806fe8f78deff99aef1b4a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
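
Both CA generations above are the stock crypto/x509 self-signed flow (the 1675-byte .key files are PKCS#1 RSA-2048). A minimal sketch, assuming a 2048-bit key and a hard-coded validity; minikube's actual parameters may differ:

    package main

    import (
    	"crypto/rand"
    	"crypto/rsa"
    	"crypto/x509"
    	"crypto/x509/pkix"
    	"encoding/pem"
    	"math/big"
    	"os"
    	"time"
    )

    func main() {
    	key, err := rsa.GenerateKey(rand.Reader, 2048)
    	if err != nil {
    		panic(err)
    	}
    	tmpl := &x509.Certificate{
    		SerialNumber:          big.NewInt(1),
    		Subject:               pkix.Name{CommonName: "minikubeCA"},
    		NotBefore:             time.Now(),
    		NotAfter:              time.Now().AddDate(10, 0, 0), // hypothetical 10-year validity
    		IsCA:                  true,
    		KeyUsage:              x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature,
    		BasicConstraintsValid: true,
    	}
    	// Self-signed: the template acts as both subject and issuer.
    	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
    	if err != nil {
    		panic(err)
    	}
    	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
    	pem.Encode(os.Stdout, &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(key)})
    }
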
	I0916 10:35:34.460749 1384589 certs.go:256] generating profile certs ...
	I0916 10:35:34.460814 1384589 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key
	I0916 10:35:34.460832 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt with IP's: []
	I0916 10:35:34.818752 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt ...
	I0916 10:35:34.818789 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: {Name:mk0c01900c6bb90e11943bb255479c9c46b42cdc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.819458 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key ...
	I0916 10:35:34.819477 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key: {Name:mk6a80bf44231e37c26b15b78c1573c745bc94c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.820007 1384589 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8
	I0916 10:35:34.820055 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:35:35.136595 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 ...
	I0916 10:35:35.136634 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8: {Name:mkefb9e5abb2f41ae336f1dfb5f1a2e66afaeb9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.136842 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 ...
	I0916 10:35:35.136857 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8: {Name:mkea4ee147dec7cfd16ab920313dbb27db2e74f5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.137417 1384589 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt
	I0916 10:35:35.137519 1384589 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key
	I0916 10:35:35.137576 1384589 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key
	I0916 10:35:35.137599 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt with IP's: []
	I0916 10:35:35.880558 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt ...
	I0916 10:35:35.880594 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt: {Name:mke368773a6b2b93aed6ad850fe8fd0d4a737afa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881334 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key ...
	I0916 10:35:35.881354 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key: {Name:mk0b7d6a78a045adf50310a69acebceca87fff88 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881575 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:35:35.881620 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:35:35.881652 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:35:35.881681 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:35:35.882348 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:35:35.913124 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:35:35.940837 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:35:35.966731 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:35:35.992292 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:35:36.018704 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:35:36.045022 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:35:36.070444 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:35:36.097278 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:35:36.122467 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:35:36.141948 1384589 ssh_runner.go:195] Run: openssl version
	I0916 10:35:36.147681 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:35:36.157655 1384589 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161783 1384589 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161849 1384589 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.169303 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:35:36.183583 1384589 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:35:36.188459 1384589 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:35:36.188535 1384589 kubeadm.go:392] StartCluster: {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:36.188663 1384589 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:35:36.188762 1384589 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:35:36.238853 1384589 cri.go:89] found id: ""
	I0916 10:35:36.238944 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:35:36.247955 1384589 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:35:36.256986 1384589 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:35:36.257089 1384589 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:35:36.266246 1384589 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:35:36.266266 1384589 kubeadm.go:157] found existing configuration files:
	
	I0916 10:35:36.266339 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:35:36.274963 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:35:36.275044 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:35:36.283444 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:35:36.292355 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:35:36.292450 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:35:36.300873 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.309855 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:35:36.309929 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.318718 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:35:36.328008 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:35:36.328097 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:35:36.336437 1384589 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:35:36.378930 1384589 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:35:36.379124 1384589 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:35:36.400406 1384589 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:35:36.400480 1384589 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:35:36.400522 1384589 kubeadm.go:310] OS: Linux
	I0916 10:35:36.400571 1384589 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:35:36.400622 1384589 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:35:36.400687 1384589 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:35:36.400738 1384589 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:35:36.400790 1384589 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:35:36.400843 1384589 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:35:36.400891 1384589 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:35:36.400941 1384589 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:35:36.400990 1384589 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:35:36.460868 1384589 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:35:36.460983 1384589 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:35:36.461077 1384589 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:35:36.469524 1384589 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:35:36.478231 1384589 out.go:235]   - Generating certificates and keys ...
	I0916 10:35:36.478421 1384589 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:35:36.478536 1384589 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:35:37.031514 1384589 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:35:37.927948 1384589 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:35:38.481156 1384589 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:35:38.950500 1384589 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:35:40.037164 1384589 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:35:40.037694 1384589 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.393078 1384589 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:35:40.393223 1384589 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.639316 1384589 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:35:41.086019 1384589 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:35:41.417060 1384589 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:35:41.417146 1384589 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:35:41.829000 1384589 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:35:42.186509 1384589 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:35:43.056769 1384589 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:35:43.944133 1384589 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:35:44.069436 1384589 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:35:44.070260 1384589 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:35:44.073516 1384589 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:35:44.076353 1384589 out.go:235]   - Booting up control plane ...
	I0916 10:35:44.076466 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:35:44.076546 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:35:44.077309 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:35:44.088522 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:35:44.095329 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:35:44.095390 1384589 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:35:44.198308 1384589 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:35:44.198428 1384589 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:35:45.200220 1384589 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00196488s
	I0916 10:35:45.200324 1384589 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:35:51.202352 1384589 kubeadm.go:310] [api-check] The API server is healthy after 6.002166951s
	I0916 10:35:51.223941 1384589 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:35:51.239556 1384589 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:35:51.267029 1384589 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:35:51.267231 1384589 kubeadm.go:310] [mark-control-plane] Marking the node addons-936355 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:35:51.278589 1384589 kubeadm.go:310] [bootstrap-token] Using token: 08qv26.fux33djnogp684b3
	I0916 10:35:51.281486 1384589 out.go:235]   - Configuring RBAC rules ...
	I0916 10:35:51.281633 1384589 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:35:51.288736 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:35:51.298974 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:35:51.303116 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:35:51.306944 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:35:51.312530 1384589 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:35:51.609739 1384589 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:35:52.042589 1384589 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:35:52.609454 1384589 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:35:52.610559 1384589 kubeadm.go:310] 
	I0916 10:35:52.610639 1384589 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:35:52.610651 1384589 kubeadm.go:310] 
	I0916 10:35:52.610728 1384589 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:35:52.610737 1384589 kubeadm.go:310] 
	I0916 10:35:52.610762 1384589 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:35:52.610825 1384589 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:35:52.610877 1384589 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:35:52.610886 1384589 kubeadm.go:310] 
	I0916 10:35:52.610939 1384589 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:35:52.610947 1384589 kubeadm.go:310] 
	I0916 10:35:52.610994 1384589 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:35:52.611003 1384589 kubeadm.go:310] 
	I0916 10:35:52.611054 1384589 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:35:52.611131 1384589 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:35:52.611205 1384589 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:35:52.611213 1384589 kubeadm.go:310] 
	I0916 10:35:52.611296 1384589 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:35:52.611376 1384589 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:35:52.611384 1384589 kubeadm.go:310] 
	I0916 10:35:52.611467 1384589 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.611571 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:35:52.611602 1384589 kubeadm.go:310] 	--control-plane 
	I0916 10:35:52.611610 1384589 kubeadm.go:310] 
	I0916 10:35:52.611694 1384589 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:35:52.611701 1384589 kubeadm.go:310] 
	I0916 10:35:52.611782 1384589 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.612037 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 10:35:52.615159 1384589 kubeadm.go:310] W0916 10:35:36.375856    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615456 1384589 kubeadm.go:310] W0916 10:35:36.376640    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615672 1384589 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:35:52.615783 1384589 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:35:52.615802 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:52.615810 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:52.618721 1384589 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:35:52.621394 1384589 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:35:52.625462 1384589 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:35:52.625484 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:35:52.644461 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:35:52.919005 1384589 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:35:52.919065 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:52.919130 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-936355 minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-936355 minikube.k8s.io/primary=true
	I0916 10:35:52.934021 1384589 ops.go:34] apiserver oom_adj: -16
	I0916 10:35:53.058693 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:53.559565 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.058855 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.558709 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.059014 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.559273 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.058909 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.559492 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.645989 1384589 kubeadm.go:1113] duration metric: took 3.7269816s to wait for elevateKubeSystemPrivileges
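
The half-second cadence of the `kubectl get sa default` runs above is a plain poll-until-ready loop: the default service account only appears once the controller manager has settled. A sketch of the pattern with a hypothetical helper:

    package main

    import (
    	"errors"
    	"fmt"
    	"os/exec"
    	"time"
    )

    // waitForDefaultSA retries `kubectl get sa default` every 500ms until it
    // succeeds or the deadline passes, matching the cadence in the log.
    func waitForDefaultSA(kubectl string, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for time.Now().Before(deadline) {
    		cmd := exec.Command("sudo", kubectl, "get", "sa", "default",
    			"--kubeconfig=/var/lib/minikube/kubeconfig")
    		if cmd.Run() == nil {
    			return nil
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	return errors.New("timed out waiting for default service account")
    }

    func main() {
    	err := waitForDefaultSA("/var/lib/minikube/binaries/v1.31.1/kubectl", 2*time.Minute)
    	if err != nil {
    		fmt.Println(err)
    	}
    }
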
	I0916 10:35:56.646081 1384589 kubeadm.go:394] duration metric: took 20.457571781s to StartCluster
	I0916 10:35:56.646115 1384589 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.646272 1384589 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:56.646729 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.647006 1384589 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:56.647218 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.647256 1384589 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
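
Every addon flagged true in that map is enabled concurrently, which is why the `Setting addon` lines below interleave out of timestamp order. Schematically (a sketch of the fan-out shape, not minikube's actual code):

    package main

    import (
    	"fmt"
    	"sync"
    )

    // enableAddon is a hypothetical stand-in for minikube's per-addon logic.
    func enableAddon(profile, name string) {
    	fmt.Printf("Setting addon %s=true in %q\n", name, profile)
    }

    func main() {
    	toEnable := map[string]bool{
    		"yakd": true, "metrics-server": true, "registry": true,
    		"volcano": true, "ambassador": false, // disabled entries are skipped
    	}
    	var wg sync.WaitGroup
    	for name, on := range toEnable {
    		if !on {
    			continue
    		}
    		wg.Add(1)
    		go func(n string) {
    			defer wg.Done()
    			enableAddon("addons-936355", n)
    		}(name)
    	}
    	wg.Wait()
    }
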
	I0916 10:35:56.647344 1384589 addons.go:69] Setting yakd=true in profile "addons-936355"
	I0916 10:35:56.647362 1384589 addons.go:234] Setting addon yakd=true in "addons-936355"
	I0916 10:35:56.647386 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.647853 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.647019 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:35:56.648343 1384589 addons.go:69] Setting inspektor-gadget=true in profile "addons-936355"
	I0916 10:35:56.648358 1384589 addons.go:69] Setting metrics-server=true in profile "addons-936355"
	I0916 10:35:56.648364 1384589 addons.go:69] Setting cloud-spanner=true in profile "addons-936355"
	I0916 10:35:56.648372 1384589 addons.go:234] Setting addon cloud-spanner=true in "addons-936355"
	I0916 10:35:56.648375 1384589 addons.go:234] Setting addon metrics-server=true in "addons-936355"
	I0916 10:35:56.648397 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648398 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648856 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648883 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.651521 1384589 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-936355"
	I0916 10:35:56.651556 1384589 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-936355"
	I0916 10:35:56.651597 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.652064 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.654169 1384589 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-936355"
	I0916 10:35:56.654360 1384589 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:35:56.654505 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.656244 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.657047 1384589 addons.go:69] Setting registry=true in profile "addons-936355"
	I0916 10:35:56.657068 1384589 addons.go:234] Setting addon registry=true in "addons-936355"
	I0916 10:35:56.657100 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.657530 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.665356 1384589 addons.go:69] Setting storage-provisioner=true in profile "addons-936355"
	I0916 10:35:56.665392 1384589 addons.go:234] Setting addon storage-provisioner=true in "addons-936355"
	I0916 10:35:56.665428 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.665900 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656864 1384589 addons.go:69] Setting default-storageclass=true in profile "addons-936355"
	I0916 10:35:56.672310 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-936355"
	I0916 10:35:56.672744 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656877 1384589 addons.go:69] Setting gcp-auth=true in profile "addons-936355"
	I0916 10:35:56.677792 1384589 mustload.go:65] Loading cluster: addons-936355
	I0916 10:35:56.678032 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.678386 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.685741 1384589 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-936355"
	I0916 10:35:56.685780 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-936355"
	I0916 10:35:56.686170 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656881 1384589 addons.go:69] Setting ingress=true in profile "addons-936355"
	I0916 10:35:56.697863 1384589 addons.go:234] Setting addon ingress=true in "addons-936355"
	I0916 10:35:56.697916 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.698402 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656886 1384589 addons.go:69] Setting ingress-dns=true in profile "addons-936355"
	I0916 10:35:56.714403 1384589 addons.go:234] Setting addon ingress-dns=true in "addons-936355"
	I0916 10:35:56.714458 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.715038 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.718711 1384589 out.go:177] * Verifying Kubernetes components...
	I0916 10:35:56.721654 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:56.725191 1384589 addons.go:69] Setting volcano=true in profile "addons-936355"
	I0916 10:35:56.725221 1384589 addons.go:234] Setting addon volcano=true in "addons-936355"
	I0916 10:35:56.725264 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.725742 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.755780 1384589 addons.go:69] Setting volumesnapshots=true in profile "addons-936355"
	I0916 10:35:56.755830 1384589 addons.go:234] Setting addon volumesnapshots=true in "addons-936355"
	I0916 10:35:56.755891 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.756438 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648360 1384589 addons.go:234] Setting addon inspektor-gadget=true in "addons-936355"
	I0916 10:35:56.781338 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.781866 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.795868 1384589 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:35:56.806961 1384589 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:35:56.813860 1384589 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:56.813885 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:35:56.813953 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.825055 1384589 addons.go:234] Setting addon default-storageclass=true in "addons-936355"
	I0916 10:35:56.825094 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.825522 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.844917 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:35:56.847733 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:56.847756 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:35:56.847823 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.855550 1384589 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:56.855573 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:35:56.855637 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.868185 1384589 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:35:56.870805 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:35:56.870832 1384589 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:35:56.870903 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.880834 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.883983 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:35:56.888274 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.892893 1384589 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:35:56.893194 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:56.893206 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:35:56.893271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.895536 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:35:56.895559 1384589 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:35:56.895631 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.932992 1384589 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-936355"
	I0916 10:35:56.933037 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.933461 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.975517 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:35:56.981731 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:35:57.008862 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:35:57.011867 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.012071 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:35:57.012271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.012572 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:35:57.018623 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	W0916 10:35:57.018876 1384589 out.go:270] ! Enabling 'volcano' returned an error: running callbacks: [volcano addon does not support crio]
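
The volcano warning above is a fast compatibility gate: the profile config loaded at the top of this section records ContainerRuntime=crio, and the volcano addon declares crio unsupported, so enabling it fails before anything is installed. A sketch of such a gate; the profile config path and JSON shape are assumptions about minikube's on-disk layout, not verified:

    // Read the profile's configured container runtime and refuse
    // unsupported ones, as the 'volcano' callback does.
    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    func main() {
        raw, err := os.ReadFile(os.Getenv("HOME") +
            "/.minikube/profiles/addons-936355/config.json") // assumed location
        if err != nil {
            panic(err)
        }
        var cfg struct {
            KubernetesConfig struct {
                ContainerRuntime string
            }
        }
        if err := json.Unmarshal(raw, &cfg); err != nil {
            panic(err)
        }
        if cfg.KubernetesConfig.ContainerRuntime == "crio" {
            fmt.Println("volcano addon does not support crio")
            return
        }
        fmt.Println("runtime supported:", cfg.KubernetesConfig.ContainerRuntime)
    }
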
	I0916 10:35:57.019207 1384589 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:35:57.026491 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:35:57.031787 1384589 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.031824 1384589 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:35:57.031905 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.035870 1384589 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:35:57.037432 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:35:57.040920 1384589 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:35:57.041029 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.041817 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:57.047002 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:35:57.047021 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:35:57.047081 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.039199 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:35:57.067112 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:35:57.067136 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:35:57.067221 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.077336 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.080728 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:35:57.083509 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:35:57.084922 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.092584 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:35:57.100918 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:35:57.102580 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.103637 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:35:57.103656 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:35:57.103715 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.120809 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.121658 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.165011 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
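
All of the "new ssh client" lines above target the same endpoint: user docker at 127.0.0.1:34603, the host port discovered earlier, authenticated with the machine's id_rsa shown in the log. A minimal equivalent using the golang.org/x/crypto/ssh package (an assumed dependency here; this is not minikube's sshutil):

    // Dial the node's SSH endpoint the way the sshutil.go:53 lines describe.
    package main

    import (
        "fmt"
        "os"

        "golang.org/x/crypto/ssh"
    )

    func main() {
        key, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa")
        if err != nil {
            panic(err)
        }
        signer, err := ssh.ParsePrivateKey(key)
        if err != nil {
            panic(err)
        }
        cfg := &ssh.ClientConfig{
            User:            "docker",
            Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
            HostKeyCallback: ssh.InsecureIgnoreHostKey(), // acceptable for a local test node
        }
        client, err := ssh.Dial("tcp", "127.0.0.1:34603", cfg)
        if err != nil {
            panic(err) // e.g. "ssh: handshake failed: EOF" while sshd is starting
        }
        defer client.Close()
        fmt.Println("connected")
    }
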
	I0916 10:35:57.190914 1384589 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:35:57.195762 1384589 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:35:57.198447 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.198482 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:35:57.198559 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.237951 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.247913 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.261430 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.263688 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.268844 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.272259 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	W0916 10:35:57.289422 1384589 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:35:57.289665 1384589 retry.go:31] will retry after 343.76577ms: ssh: handshake failed: EOF
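
The handshake EOF above is an expected transient while sshd inside the node container is still coming up; minikube logs the failure and retries after a short delay. A generic sketch of that retry-with-backoff pattern (the delays and attempt count are illustrative, not retry.go's policy):

    // Retry a transient operation with growing, slightly jittered delays.
    package main

    import (
        "errors"
        "fmt"
        "math/rand"
        "time"
    )

    func retry(attempts int, initial time.Duration, op func() error) error {
        delay := initial
        var err error
        for i := 0; i < attempts; i++ {
            if err = op(); err == nil {
                return nil
            }
            jitter := time.Duration(rand.Int63n(int64(delay) / 4))
            fmt.Printf("will retry after %v: %v\n", delay+jitter, err)
            time.Sleep(delay + jitter)
            delay *= 2
        }
        return fmt.Errorf("after %d attempts: %w", attempts, err)
    }

    func main() {
        calls := 0
        err := retry(5, 300*time.Millisecond, func() error {
            calls++
            if calls < 3 {
                return errors.New("ssh: handshake failed: EOF")
            }
            return nil
        })
        fmt.Println("result:", err)
    }
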
	I0916 10:35:57.317769 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.327435 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:57.327622 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
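
The bash pipeline above rewrites the CoreDNS Corefile in place: it inserts a hosts{} block resolving host.minikube.internal to 192.168.49.1 immediately before the "forward . /etc/resolv.conf" plugin (and a log directive before errors), then replaces the ConfigMap. A string-level sketch of the hosts insertion alone; the real flow round-trips the ConfigMap through kubectl:

    // Insert a hosts{} block above the forward plugin of a Corefile.
    package main

    import (
        "fmt"
        "strings"
    )

    func injectHostRecord(corefile, hostIP string) string {
        block := fmt.Sprintf("        hosts {\n           %s host.minikube.internal\n           fallthrough\n        }\n", hostIP)
        var out strings.Builder
        for _, line := range strings.SplitAfter(corefile, "\n") {
            if strings.HasPrefix(strings.TrimSpace(line), "forward . /etc/resolv.conf") {
                out.WriteString(block) // insert just above the forward plugin
            }
            out.WriteString(line)
        }
        return out.String()
    }

    func main() {
        corefile := ".:53 {\n        errors\n        forward . /etc/resolv.conf\n        cache 30\n}\n"
        fmt.Print(injectHostRecord(corefile, "192.168.49.1"))
    }
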
	I0916 10:35:57.507011 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:57.508273 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:57.512529 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:35:57.512557 1384589 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:35:57.532805 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:57.544603 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:35:57.544626 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:35:57.554769 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:57.597359 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.683748 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:35:57.683782 1384589 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:35:57.706763 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.708832 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:35:57.708864 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:35:57.733074 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:35:57.733107 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:35:57.767880 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:35:57.767908 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:35:57.780746 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:35:57.780786 1384589 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:35:57.807404 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.850707 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:35:57.850745 1384589 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:35:57.887607 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:35:57.887636 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:35:57.954841 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:57.954878 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:35:57.957894 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:35:57.957918 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:35:57.990850 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:57.990882 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:35:58.040155 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.040193 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:35:58.078005 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:35:58.078038 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:35:58.084259 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:35:58.084302 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:35:58.131227 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:35:58.131253 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:35:58.132161 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:58.147419 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:58.178615 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.199520 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:35:58.199553 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:35:58.206840 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:35:58.206873 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:35:58.251350 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:35:58.251378 1384589 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:35:58.301781 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:35:58.301809 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:35:58.328155 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:35:58.328184 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:35:58.351423 1384589 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.351449 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:35:58.404154 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:35:58.404188 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:35:58.467023 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.468235 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:35:58.468257 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:35:58.517809 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:35:58.517836 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:35:58.529132 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:35:58.529162 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:35:58.607318 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.607345 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:35:58.620217 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:35:58.620264 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:35:58.671546 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.726776 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:35:58.726803 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:35:58.855138 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:35:58.855204 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:35:58.993338 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:35:58.993375 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:35:59.149795 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
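
Each addon's manifests are applied in a single kubectl invocation with repeated -f flags, as in the long Run lines above, using the pinned kubectl binary under /var/lib/minikube/binaries and the node's kubeconfig. A sketch of assembling one such invocation:

    // Build and run a batched "kubectl apply -f a.yaml -f b.yaml ..." call.
    package main

    import (
        "fmt"
        "os/exec"
    )

    func applyAddon(kubectl, kubeconfig string, manifests []string) error {
        args := []string{"--kubeconfig=" + kubeconfig, "apply"}
        for _, m := range manifests {
            args = append(args, "-f", m)
        }
        out, err := exec.Command(kubectl, args...).CombinedOutput()
        fmt.Print(string(out))
        return err
    }

    func main() {
        csi := []string{
            "/etc/kubernetes/addons/rbac-external-attacher.yaml",
            "/etc/kubernetes/addons/csi-hostpath-attacher.yaml",
            "/etc/kubernetes/addons/csi-hostpath-plugin.yaml",
            // ...the remaining csi-hostpath manifests from the log line above
        }
        if err := applyAddon("/var/lib/minikube/binaries/v1.31.1/kubectl",
            "/var/lib/minikube/kubeconfig", csi); err != nil {
            fmt.Println("apply failed:", err)
        }
    }
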
	I0916 10:36:00.098293 1384589 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (2.77063538s)
	I0916 10:36:00.098468 1384589 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:36:00.098398 1384589 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (2.770933472s)
	I0916 10:36:00.099620 1384589 node_ready.go:35] waiting up to 6m0s for node "addons-936355" to be "Ready" ...
	I0916 10:36:00.683691 1384589 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-936355" context rescaled to 1 replicas
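
node_ready.go now polls the node's Ready condition with a 6-minute deadline; the repeated "Ready":"False" lines that follow are those polls. A sketch of the same check via kubectl's JSONPath output (the poll interval is illustrative):

    // Poll the node's Ready condition until True or the deadline passes.
    package main

    import (
        "fmt"
        "os/exec"
        "strings"
        "time"
    )

    func nodeReady(node string) (bool, error) {
        out, err := exec.Command("kubectl", "get", "node", node, "-o",
            `jsonpath={.status.conditions[?(@.type=="Ready")].status}`).Output()
        return strings.TrimSpace(string(out)) == "True", err
    }

    func main() {
        deadline := time.Now().Add(6 * time.Minute)
        for time.Now().Before(deadline) {
            ready, err := nodeReady("addons-936355")
            if err == nil && ready {
                fmt.Println("node is Ready")
                return
            }
            fmt.Println(`node "addons-936355" has status "Ready":"False"`)
            time.Sleep(2 * time.Second)
        }
        fmt.Println("timed out waiting for node to be Ready")
    }
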
	I0916 10:36:02.134513 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:03.099256 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (5.592195221s)
	I0916 10:36:03.099297 1384589 addons.go:475] Verifying addon ingress=true in "addons-936355"
	I0916 10:36:03.099513 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (5.591216064s)
	I0916 10:36:03.099584 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (5.56674984s)
	I0916 10:36:03.099618 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (5.544827293s)
	I0916 10:36:03.099645 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (5.502263542s)
	I0916 10:36:03.099882 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.393096916s)
	I0916 10:36:03.099983 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (5.292558502s)
	I0916 10:36:03.100117 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (4.967882873s)
	I0916 10:36:03.100138 1384589 addons.go:475] Verifying addon registry=true in "addons-936355"
	I0916 10:36:03.100642 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (4.953181668s)
	I0916 10:36:03.100670 1384589 addons.go:475] Verifying addon metrics-server=true in "addons-936355"
	I0916 10:36:03.100733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (4.922089802s)
	I0916 10:36:03.102943 1384589 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-936355 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:36:03.102961 1384589 out.go:177] * Verifying registry addon...
	I0916 10:36:03.103034 1384589 out.go:177] * Verifying ingress addon...
	I0916 10:36:03.105813 1384589 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:36:03.106800 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:36:03.137676 1384589 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:36:03.137755 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.140614 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:03.140698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
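
The kapi.go wait loops above list pods by label selector and poll until every match leaves Pending; "current state: Pending: [<nil>]" means the pod phase is Pending with no container error attached. A sketch of that loop, with the selector and namespace taken from the log and the polling policy illustrative:

    // Poll pods by label selector while any is still Pending.
    package main

    import (
        "fmt"
        "os/exec"
        "slices"
        "strings"
        "time"
    )

    func podPhases(ns, selector string) ([]string, error) {
        out, err := exec.Command("kubectl", "get", "pods", "-n", ns,
            "-l", selector, "-o", "jsonpath={.items[*].status.phase}").Output()
        if err != nil {
            return nil, err
        }
        return strings.Fields(string(out)), nil
    }

    func main() {
        for {
            phases, _ := podPhases("kube-system", "kubernetes.io/minikube-addons=registry")
            if len(phases) > 0 && !slices.Contains(phases, "Pending") {
                fmt.Println("all pods past Pending:", phases)
                return
            }
            fmt.Println("waiting for pod, current state:", phases)
            time.Sleep(2 * time.Second)
        }
    }
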
	W0916 10:36:03.153289 1384589 out.go:270] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class local-path as default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "local-path": the object has been modified; please apply your changes to the latest version and try again]
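
The storage-provisioner-rancher warning is a standard Kubernetes optimistic-concurrency conflict: the StorageClass changed between the addon's read and its update, so the write carried a stale resourceVersion. Re-reading and retrying fixes it; a patch of the default-class annotation sidesteps the race entirely, as in this sketch:

    // Mark local-path as the default StorageClass via a patch, which does
    // not carry a resourceVersion and so cannot hit this conflict.
    package main

    import (
        "fmt"
        "os/exec"
    )

    func main() {
        patch := `{"metadata":{"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}`
        out, err := exec.Command("kubectl", "patch", "storageclass", "local-path",
            "-p", patch).CombinedOutput()
        fmt.Print(string(out))
        if err != nil {
            fmt.Println("patch failed:", err)
        }
    }
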
	I0916 10:36:03.250335 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (4.783269551s)
	W0916 10:36:03.250417 1384589 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250450 1384589 retry.go:31] will retry after 275.497637ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250543 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (4.57892356s)
	I0916 10:36:03.461537 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (4.311696877s)
	I0916 10:36:03.461620 1384589 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:36:03.466201 1384589 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:36:03.469722 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:36:03.486422 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:03.486490 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:03.526121 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
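
The failed apply above bundled the VolumeSnapshot CRDs together with a VolumeSnapshotClass object; the CRDs were created but not yet established when the custom resource was mapped, hence "no matches for kind ... ensure CRDs are installed first". The log's remedy is the timed retry with apply --force just issued; an explicit alternative is to wait for CRD establishment between two applies, sketched here with plain kubectl calls:

    // Create the CRDs, wait for their Established condition, then apply
    // the objects that depend on them.
    package main

    import (
        "fmt"
        "os/exec"
    )

    func run(args ...string) error {
        out, err := exec.Command("kubectl", args...).CombinedOutput()
        fmt.Print(string(out))
        return err
    }

    func main() {
        steps := [][]string{
            {"apply", "-f", "/etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml"},
            {"wait", "--for=condition=Established", "--timeout=60s",
                "crd/volumesnapshotclasses.snapshot.storage.k8s.io"},
            {"apply", "-f", "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml"},
        }
        for _, s := range steps {
            if err := run(s...); err != nil {
                fmt.Println("step failed:", err)
                return
            }
        }
    }
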
	I0916 10:36:03.615580 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:03.616763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.973974 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.110336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.111341 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.482735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.603445 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:04.611582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.612963 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.974584 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.112352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.113152 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.475349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.612975 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.617564 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.994295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.112783 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.113610 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.402733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.87656549s)
	I0916 10:36:06.474104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.604073 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:06.611947 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.613297 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.111053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.112244 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.247182 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:36:07.247343 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.269993 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.399328 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:36:07.424561 1384589 addons.go:234] Setting addon gcp-auth=true in "addons-936355"
	I0916 10:36:07.424615 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:36:07.425137 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:36:07.445430 1384589 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:36:07.445507 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.462936 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.473788 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.564092 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:36:07.566842 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:36:07.569433 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:36:07.569479 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:36:07.591162 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:36:07.591235 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:36:07.611011 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.612352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.614169 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.614230 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:36:07.634944 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.973644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.114938 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.115927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.289637 1384589 addons.go:475] Verifying addon gcp-auth=true in "addons-936355"
	I0916 10:36:08.292442 1384589 out.go:177] * Verifying gcp-auth addon...
	I0916 10:36:08.297073 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:36:08.311457 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:36:08.311536 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.473794 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.610857 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.611876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.801268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.973643 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.105583 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:09.110567 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.111022 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.300943 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.478291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.611071 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.612876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.801153 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.973766 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.118258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.119777 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.307205 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.473996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.611600 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.611698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.801229 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.974340 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.112014 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:11.116183 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.120476 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.301066 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.473420 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.610713 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.612423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.800270 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.973407 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.115791 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.116920 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.301411 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.473867 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.609770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.610662 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.801634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.973046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.110851 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.111134 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.300575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.473835 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.603219 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:13.610390 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.611574 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.801371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.973479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.112208 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.113533 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.300299 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.474139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.610046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.612561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.800653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.972848 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.110408 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.110932 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.300237 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.473707 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.603293 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:15.610246 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.611371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.800451 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.973710 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.110350 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.111259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.300830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.472823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.609912 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.610711 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.801005 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.973568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.110550 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.112172 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.301017 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.473847 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.603589 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:17.610593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.611441 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	[identical kapi.go:96 "waiting for pod ... current state: Pending" lines for the gcp-auth, csi-hostpath-driver, registry and ingress-nginx selectors repeat every ~200ms, and node_ready.go:53 re-reports node "addons-936355" "Ready":"False" every ~2.5s, from 10:36:17 through 10:36:42]
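The kapi.go:96 lines above come from a label-selector polling loop: list the pods matching a selector, log each one that is not yet Running, and retry on a short interval. As a rough illustration only (this is not minikube's actual kapi.go; the waitForPods name, the 200ms interval, and the kubeconfig handling are assumptions), a minimal client-go sketch of that pattern could look like this:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitForPods polls pods matching selector in ns until all of them reach
// phase Running, printing a "waiting for pod" line on each pass, in the
// spirit of the kapi.go:96 output above. Illustrative sketch, not
// minikube's implementation.
func waitForPods(ctx context.Context, cs kubernetes.Interface, ns, selector string) error {
	ticker := time.NewTicker(200 * time.Millisecond) // the log polls at roughly this interval (assumption)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-ticker.C:
			pods, err := cs.CoreV1().Pods(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
			if err != nil {
				return err
			}
			done := len(pods.Items) > 0 // nothing found yet also counts as "keep waiting"
			for _, p := range pods.Items {
				if p.Status.Phase != corev1.PodRunning {
					fmt.Printf("waiting for pod %q, current state: %s\n", selector, p.Status.Phase)
					done = false
				}
			}
			if done {
				return nil
			}
		}
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 6*time.Minute)
	defer cancel()
	if err := waitForPods(ctx, cs, "kube-system", "kubernetes.io/minikube-addons=registry"); err != nil {
		panic(err)
	}
}

With a selector such as kubernetes.io/minikube-addons=registry this produces one "waiting for pod" line per pass until the pods leave Pending, matching the cadence of the log above.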
	I0916 10:36:42.612461 1384589 node_ready.go:49] node "addons-936355" has status "Ready":"True"
	I0916 10:36:42.612538 1384589 node_ready.go:38] duration metric: took 42.512890552s for node "addons-936355" to be "Ready" ...
	I0916 10:36:42.612563 1384589 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:36:42.623341 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:42.623417 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.624231 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.627174 1384589 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:42.859763 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.978703 1384589 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:42.978731 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	[kapi.go:96 "waiting for pod ... current state: Pending" polling for the same four selectors continues from 10:36:43 through 10:36:44]
	I0916 10:36:44.633869 1384589 pod_ready.go:93] pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.633943 1384589 pod_ready.go:82] duration metric: took 2.006728044s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.633994 1384589 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642581 1384589 pod_ready.go:93] pod "etcd-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.642653 1384589 pod_ready.go:82] duration metric: took 8.633064ms for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642683 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650836 1384589 pod_ready.go:93] pod "kube-apiserver-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.650858 1384589 pod_ready.go:82] duration metric: took 8.155202ms for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650871 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656888 1384589 pod_ready.go:93] pod "kube-controller-manager-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.656911 1384589 pod_ready.go:82] duration metric: took 6.032453ms for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656925 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663172 1384589 pod_ready.go:93] pod "kube-proxy-6zqlq" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.663198 1384589 pod_ready.go:82] duration metric: took 6.264685ms for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663210 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.800889 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.975665 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.036535 1384589 pod_ready.go:93] pod "kube-scheduler-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:45.036565 1384589 pod_ready.go:82] duration metric: took 373.347727ms for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:45.036579 1384589 pod_ready.go:79] waiting up to 6m0s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
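Unlike the kapi.go:96 phase polling, the pod_ready.go lines report the pod's Ready condition, which is why a Running pod such as metrics-server can still log "Ready":"False". As a hedged sketch only (not minikube's actual pod_ready.go; podReadyStatus, waitPodReady, and the 2s re-check interval are illustrative assumptions), the check could be expressed like this:

package readiness

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// podReadyStatus returns the pod's Ready condition as "True", "False" or
// "Unknown", i.e. the value quoted in the pod_ready.go lines above.
func podReadyStatus(p *corev1.Pod) string {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return string(c.Status)
		}
	}
	return "Unknown"
}

// waitPodReady re-checks the condition every 2s until it is "True" or the
// context (e.g. a 6m0s timeout, as in the log) expires. Sketch only.
func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	for {
		p, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		status := podReadyStatus(p)
		fmt.Printf("pod %q in %q namespace has status \"Ready\":%q\n", name, ns, status)
		if status == "True" {
			return nil
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(2 * time.Second):
		}
	}
}

Under this reading, the control-plane pods above returned "True" on the first check, while metrics-server keeps returning "False" in the span that follows.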
	I0916 10:36:45.111493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.112631 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.044703 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	[kapi.go:96 polling for the gcp-auth, csi-hostpath-driver, registry and ingress-nginx selectors continues unchanged, and pod_ready.go:103 re-reports pod "metrics-server-84c5f94fbc-hngcs" "Ready":"False" roughly every 2s, from 10:36:45 through 10:37:10]
	I0916 10:37:10.979174 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.112568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.116046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.301477 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.475805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.613534 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.615206 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.802410 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.976748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.047271 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:12.112753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.114779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.300849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.479609 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.633512 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.635102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.801945 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.978658 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.111553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.113586 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.303385 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.479039 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.615554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.806654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.981409 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.060889 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:14.112654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.113844 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.301688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.474872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.610310 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.610746 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.800633 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.975036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.112998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.115460 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.300634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.474102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.613955 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.615489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.801741 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.975686 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.113469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.114978 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.301581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.475151 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.550481 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:16.614516 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.615278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.802546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.975189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.110944 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.111649 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.302100 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.475101 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.611759 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.612357 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.800825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.975226 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.110760 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.112805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.300370 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.474527 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.610984 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.611944 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.801132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.974591 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.046356 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:19.112245 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.115197 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.301744 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.475515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.610679 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.614216 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.801704 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.974949 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.111388 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.114141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.301219 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.474669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.611319 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.615110 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.801384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.976136 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.113352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.113988 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.489778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.545440 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:21.613554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.616634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.801820 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.977146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.111094 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.112217 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.301825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.475834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.611602 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.612556 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.805363 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.975337 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.112472 1384589 kapi.go:107] duration metric: took 1m20.005670496s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:37:23.113515 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.300925 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.474515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.610822 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.801408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.977906 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.044059 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:24.117487 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.301384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.476565 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.611373 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.801872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.984901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.111954 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.300421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.475126 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.611267 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.808830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.975068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.111025 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.310954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.475111 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.543709 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:26.609974 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.838995 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.975321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.110779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.301198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.476321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.610748 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.801486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.975547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.110763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.301469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.474991 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.610943 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.801350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.975749 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.046127 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:29.110966 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.305494 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.475929 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.609824 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.801492 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.977852 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.113447 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.301994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.476258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.610718 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.801712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.975400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.110916 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.300717 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.474547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.542764 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:31.612339 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.804045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.975617 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.110961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.300588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.482569 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.611127 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.804201 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.975368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.111355 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.301816 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:33.477518 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.551472 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:33.611027 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.801158 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.013405 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.127200 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.310368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.475923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.611219 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.801913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.978855 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.118452 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.300764 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.476873 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.611849 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.802246 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.975118 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.044866 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:36.111125 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.301167 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.477188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.617190 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.801375 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.974623 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.113798 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.301345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.479115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.611187 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.802141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.976103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.094708 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:38.116394 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.300966 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.474752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.610164 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.800561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.975817 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.110879 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.301972 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.475982 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.614550 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.801870 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.975576 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.112781 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:40.301195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.476921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.543014 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:40.612230 1384589 kapi.go:107] duration metric: took 1m37.506412903s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:37:40.800501 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.980528 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.301899 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.478479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.801278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.975045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.302225 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.487350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.548067 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:42.806839 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.976392 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.300621 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.475884 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.802919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.975139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.301415 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.475371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.801688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.975259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.062166 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:45.301957 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.477003 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.802892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.301112 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.475372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.800784 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.974857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.303524 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.475768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.545443 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:47.800473 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.974841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.301353 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.474781 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.800728 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.975372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.301044 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.475307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.801251 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.976296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.044755 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:50.306461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.478119 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.802508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.975919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.310303 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.475230 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.801606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.975318 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.053332 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:52.302006 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:52.476486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.801441 1384589 kapi.go:107] duration metric: took 1m44.50438368s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:37:52.803585 1384589 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-936355 cluster.
	I0916 10:37:52.805126 1384589 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:37:52.807003 1384589 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
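
The three messages above describe the gcp-auth webhook's opt-out: a pod that should not have the credentials mounted carries the `gcp-auth-skip-secret` label in its own spec. A minimal sketch of such a pod, assuming the label value "true" as shown in the minikube docs (the pod name and image here are illustrative, not from this run):

    kubectl --context addons-936355 apply -f - <<'EOF'
    apiVersion: v1
    kind: Pod
    metadata:
      name: no-gcp-creds              # hypothetical pod name
      labels:
        gcp-auth-skip-secret: "true"  # opt-out label named in the message above
    spec:
      containers:
      - name: main
        image: busybox
        command: ["sleep", "3600"]
    EOF

Because the webhook acts at admission time, the label must be present when the pod is created; labeling a running pod afterwards would not unmount anything.
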
	I0916 10:37:52.974797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.475581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.975250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.474446 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.542561 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:54.975714 1384589 kapi.go:107] duration metric: took 1m51.5059929s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:37:54.976913 1384589 out.go:177] * Enabled addons: nvidia-device-plugin, ingress-dns, cloud-spanner, storage-provisioner, metrics-server, yakd, default-storageclass, inspektor-gadget, volumesnapshots, registry, ingress, gcp-auth, csi-hostpath-driver
	I0916 10:37:54.977951 1384589 addons.go:510] duration metric: took 1m58.330681209s for enable addons: enabled=[nvidia-device-plugin ingress-dns cloud-spanner storage-provisioner metrics-server yakd default-storageclass inspektor-gadget volumesnapshots registry ingress gcp-auth csi-hostpath-driver]
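
For pods created before gcp-auth finished enabling, the hint printed above suggests a refresh. As a sketch, with the profile name taken from this run and the flag as printed in the message (not re-verified against this exact build):

    minikube -p addons-936355 addons enable gcp-auth --refresh
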
	I0916 10:37:56.543286 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:58.543538 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:00.545466 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:03.044859 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:05.543384 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:08.044081 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:10.044862 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:12.543815 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:15.046388 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:17.044536 1384589 pod_ready.go:93] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.044563 1384589 pod_ready.go:82] duration metric: took 1m32.00797612s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.044576 1384589 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054621 1384589 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.054646 1384589 pod_ready.go:82] duration metric: took 10.061393ms for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054673 1384589 pod_ready.go:39] duration metric: took 1m34.442085136s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:38:17.054689 1384589 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:38:17.054724 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:17.054791 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:17.110909 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:17.110942 1384589 cri.go:89] found id: ""
	I0916 10:38:17.110950 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:17.111018 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.114542 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:17.114619 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:17.153834 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.153856 1384589 cri.go:89] found id: ""
	I0916 10:38:17.153864 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:17.153923 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.157470 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:17.157579 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:17.198133 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:17.198155 1384589 cri.go:89] found id: ""
	I0916 10:38:17.198163 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:17.198222 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.201699 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:17.201773 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:17.244177 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.244206 1384589 cri.go:89] found id: ""
	I0916 10:38:17.244215 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:17.244287 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.248238 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:17.248346 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:17.286359 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.286380 1384589 cri.go:89] found id: ""
	I0916 10:38:17.286388 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:17.286476 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.290475 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:17.290598 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:17.332786 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.332808 1384589 cri.go:89] found id: ""
	I0916 10:38:17.332817 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:17.332887 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.336545 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:17.336625 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:17.376900 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.376922 1384589 cri.go:89] found id: ""
	I0916 10:38:17.376930 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:17.376991 1384589 ssh_runner.go:195] Run: which crictl
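
The sequence above repeats one pattern per control-plane component: resolve container IDs with crictl, then (in the gathering steps that follow) tail each container's log. Condensed into a standalone sketch, run on the node (e.g. via `minikube -p addons-936355 ssh`); `kube-apiserver` stands in for any component name, and both commands are copied from the log lines above:

    ids="$(sudo crictl ps -a --quiet --name=kube-apiserver)"
    for id in $ids; do
      sudo /usr/bin/crictl logs --tail 400 "$id"
    done
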
	I0916 10:38:17.380608 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:17.380639 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.430005 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:17.430059 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.478918 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:17.478953 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:17.578588 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:17.578626 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:17.596725 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:17.596755 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:17.780455 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:17.780482 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.832701 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:17.832737 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.873549 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:17.873579 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.944894 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:17.944933 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:18.006230 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:18.006286 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:18.071787 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072057 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.072239 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072456 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.075800 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.076027 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.087591 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.087896 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.088088 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.088320 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.128812 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:18.128841 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:18.186612 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:18.186644 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:18.233148 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233182 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:18.233388 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:18.233404 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233412 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233423 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233429 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233449 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.233461 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233470 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:28.234697 1384589 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:38:28.249266 1384589 api_server.go:72] duration metric: took 2m31.602198408s to wait for apiserver process to appear ...
	I0916 10:38:28.249292 1384589 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:38:28.249329 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:28.249401 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:28.291513 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:28.291538 1384589 cri.go:89] found id: ""
	I0916 10:38:28.291546 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:28.291605 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.295282 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:28.295362 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:28.334381 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.334460 1384589 cri.go:89] found id: ""
	I0916 10:38:28.334479 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:28.334596 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.338232 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:28.338315 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:28.386465 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.386495 1384589 cri.go:89] found id: ""
	I0916 10:38:28.386503 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:28.386564 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.390431 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:28.390508 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:28.428479 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:28.428500 1384589 cri.go:89] found id: ""
	I0916 10:38:28.428508 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:28.428568 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.431936 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:28.432009 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:28.480074 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.480148 1384589 cri.go:89] found id: ""
	I0916 10:38:28.480171 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:28.480257 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.484845 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:28.484948 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:28.526872 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:28.526896 1384589 cri.go:89] found id: ""
	I0916 10:38:28.526905 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:28.526965 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.530520 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:28.530607 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:28.569037 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.569065 1384589 cri.go:89] found id: ""
	I0916 10:38:28.569074 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:28.569150 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.572604 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:28.572634 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:28.589298 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:28.589323 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:28.729585 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:28.729703 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.802248 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:28.802300 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.843099 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:28.843130 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.886320 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:28.886350 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.930299 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:28.930374 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:29.041608 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:29.041656 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:29.079590 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.079841 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.080020 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.080236 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.083646 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.083870 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095503 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.095743 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095931 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.096162 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.147372 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:29.147401 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:29.214117 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:29.214148 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:29.266528 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:29.266562 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:29.339157 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:29.339193 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:29.402328 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402360 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:29.402421 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:29.402433 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402445 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402453 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402464 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402472 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.402483 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402490 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:39.403739 1384589 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:38:39.411467 1384589 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:38:39.412460 1384589 api_server.go:141] control plane version: v1.31.1
	I0916 10:38:39.412486 1384589 api_server.go:131] duration metric: took 11.16318566s to wait for apiserver health ...
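
	The healthz wait above polls the apiserver endpoint directly over HTTPS. As a sketch, the same probe can be reproduced by hand against the address shown in the log (the -k flag skips verification of the cluster's self-signed certificate):

	    # Manual version of the apiserver health probe performed above (sketch).
	    curl -sk https://192.168.49.2:8443/healthz   # prints "ok" when healthy
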
	I0916 10:38:39.412495 1384589 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:38:39.412517 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:39.412584 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:39.451224 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:39.451243 1384589 cri.go:89] found id: ""
	I0916 10:38:39.451251 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:39.451311 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.454893 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:39.454968 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:39.499416 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.499439 1384589 cri.go:89] found id: ""
	I0916 10:38:39.499448 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:39.499510 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.503122 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:39.503208 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:39.542014 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:39.542035 1384589 cri.go:89] found id: ""
	I0916 10:38:39.542043 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:39.542101 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.546062 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:39.546152 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:39.587808 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:39.587831 1384589 cri.go:89] found id: ""
	I0916 10:38:39.587842 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:39.587908 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.591371 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:39.591441 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:39.629404 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:39.629428 1384589 cri.go:89] found id: ""
	I0916 10:38:39.629437 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:39.629495 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.633014 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:39.633091 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:39.676945 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:39.676965 1384589 cri.go:89] found id: ""
	I0916 10:38:39.676973 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:39.677033 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.680612 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:39.680742 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:39.722262 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:39.722282 1384589 cri.go:89] found id: ""
	I0916 10:38:39.722291 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:39.722347 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.726091 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:39.726167 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:39.742632 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:39.742660 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.814109 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:39.814142 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:39.914270 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:39.914308 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:40.019354 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:40.019397 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:40.079304 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:40.079345 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:40.123482 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.123736 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.123917 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.124171 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.127515 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.127756 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139306 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139536 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139726 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139953 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.192100 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:40.192138 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:40.333078 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:40.333117 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:40.403526 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:40.403566 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:40.442653 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:40.442681 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:40.492601 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:40.492632 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:40.533326 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:40.533357 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:40.587619 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587653 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:40.587735 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:40.587753 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587783 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587793 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587808 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587820 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.587827 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587838 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:50.602620 1384589 system_pods.go:59] 18 kube-system pods found
	I0916 10:38:50.602695 1384589 system_pods.go:61] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.602715 1384589 system_pods.go:61] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.602720 1384589 system_pods.go:61] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.602728 1384589 system_pods.go:61] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.602736 1384589 system_pods.go:61] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.602745 1384589 system_pods.go:61] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.602749 1384589 system_pods.go:61] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.602753 1384589 system_pods.go:61] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.602762 1384589 system_pods.go:61] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.602767 1384589 system_pods.go:61] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.602771 1384589 system_pods.go:61] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.602775 1384589 system_pods.go:61] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.602794 1384589 system_pods.go:61] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.602798 1384589 system_pods.go:61] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.602813 1384589 system_pods.go:61] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.602821 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.602825 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.602832 1384589 system_pods.go:61] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.602848 1384589 system_pods.go:74] duration metric: took 11.190345697s to wait for pod list to return data ...
	I0916 10:38:50.602873 1384589 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:38:50.606360 1384589 default_sa.go:45] found service account: "default"
	I0916 10:38:50.606391 1384589 default_sa.go:55] duration metric: took 3.50956ms for default service account to be created ...
	I0916 10:38:50.606400 1384589 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:38:50.616619 1384589 system_pods.go:86] 18 kube-system pods found
	I0916 10:38:50.616661 1384589 system_pods.go:89] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.616668 1384589 system_pods.go:89] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.617624 1384589 system_pods.go:89] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.617646 1384589 system_pods.go:89] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.617652 1384589 system_pods.go:89] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.617662 1384589 system_pods.go:89] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.617668 1384589 system_pods.go:89] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.617673 1384589 system_pods.go:89] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.617677 1384589 system_pods.go:89] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.617682 1384589 system_pods.go:89] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.617686 1384589 system_pods.go:89] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.617691 1384589 system_pods.go:89] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.617696 1384589 system_pods.go:89] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.617701 1384589 system_pods.go:89] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.617705 1384589 system_pods.go:89] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.617716 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.617730 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.617734 1384589 system_pods.go:89] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.617742 1384589 system_pods.go:126] duration metric: took 11.335042ms to wait for k8s-apps to be running ...
	I0916 10:38:50.617754 1384589 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:38:50.617812 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:38:50.630041 1384589 system_svc.go:56] duration metric: took 12.276523ms WaitForService to wait for kubelet
	I0916 10:38:50.630069 1384589 kubeadm.go:582] duration metric: took 2m53.983006463s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:38:50.630088 1384589 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:38:50.633754 1384589 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:38:50.633790 1384589 node_conditions.go:123] node cpu capacity is 2
	I0916 10:38:50.633806 1384589 node_conditions.go:105] duration metric: took 3.708685ms to run NodePressure ...
	I0916 10:38:50.633819 1384589 start.go:241] waiting for startup goroutines ...
	I0916 10:38:50.633826 1384589 start.go:246] waiting for cluster config update ...
	I0916 10:38:50.633842 1384589 start.go:255] writing updated cluster config ...
	I0916 10:38:50.634158 1384589 ssh_runner.go:195] Run: rm -f paused
	I0916 10:38:50.643301 1384589 out.go:177] * Done! kubectl is now configured to use "addons-936355" cluster and "default" namespace by default
	E0916 10:38:50.646536 1384589 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
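
	The "fork/exec /usr/local/bin/kubectl: exec format error" above is ENOEXEC from the kernel, which on an arm64 host almost always means the kubectl binary was built for a different architecture (e.g., amd64), and it is consistent with the kubectl-driven failures elsewhere in this run. A minimal diagnostic sketch, assuming shell access to the runner:

	    # Sketch: confirm a binary/host architecture mismatch on an arm64 runner.
	    uname -m                       # expect: aarch64
	    file /usr/local/bin/kubectl    # an amd64 build reports "x86-64" instead of "ARM aarch64"
	    # Replacing the binary with an arm64 build of the matching version clears the error, e.g.:
	    curl -LO "https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl"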
	
	
	==> CRI-O <==
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.951968254Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=ade1caf6-2145-4e84-81e9-3f71b003b2c7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.952231508Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=ade1caf6-2145-4e84-81e9-3f71b003b2c7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.953062633Z" level=info msg="Pulling image: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=aec7eb7a-34f5-4a54-8544-6d4473e524d8 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.955317303Z" level=info msg="Trying to access \"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\""
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.190458782Z" level=info msg="Pulled image: ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=aec7eb7a-34f5-4a54-8544-6d4473e524d8 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191172133Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191407713Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192237878Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192468518Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193197779Z" level=info msg="Creating container: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193291028Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257217560Z" level=info msg="Created container bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257933316Z" level=info msg="Starting container: bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66" id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.266608200Z" level=info msg="Started container" PID=6346 containerID=bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66 description=gadget/gadget-hx2qq/gadget id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer sandboxID=cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769
	Sep 16 10:42:42 addons-936355 conmon[6335]: conmon bcf51d70eaf49387d9ea <ninfo>: container 6346 exited with status 1
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.272327618Z" level=info msg="Removing container: d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.294374970Z" level=info msg="Removed container d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f: gadget/gadget-hx2qq/gadget" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:44:59 addons-936355 crio[961]: time="2024-09-16 10:44:59.034213271Z" level=info msg="Stopping container: b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410 (timeout: 30s)" id=f86b5341-2037-431a-9670-cd9962d4c777 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.223840830Z" level=info msg="Stopped container b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410: kube-system/metrics-server-84c5f94fbc-hngcs/metrics-server" id=f86b5341-2037-431a-9670-cd9962d4c777 name=/runtime.v1.RuntimeService/StopContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.224531667Z" level=info msg="Stopping pod sandbox: d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78" id=e702dc7c-e29a-4701-9354-361c77e73c12 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.224863613Z" level=info msg="Got pod network &{Name:metrics-server-84c5f94fbc-hngcs Namespace:kube-system ID:d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78 UID:5901d847-eeb7-4c71-97ba-d08734fb39ed NetNS:/var/run/netns/d869eb9f-bc5e-4d56-b86e-af8ebf6deab2 Networks:[{Name:kindnet Ifname:eth0}] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.225040930Z" level=info msg="Deleting pod kube-system_metrics-server-84c5f94fbc-hngcs from CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.353255654Z" level=info msg="Stopped pod sandbox: d015a3419dfc0a7ed6c5cb9bbaa97743a3f95bc504b27df5c2861ca84165fc78" id=e702dc7c-e29a-4701-9354-361c77e73c12 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.635650185Z" level=info msg="Removing container: b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410" id=4673ad4c-4397-44f1-85fd-2477b808375c name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:45:00 addons-936355 crio[961]: time="2024-09-16 10:45:00.700191485Z" level=info msg="Removed container b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410: kube-system/metrics-server-84c5f94fbc-hngcs/metrics-server" id=4673ad4c-4397-44f1-85fd-2477b808375c name=/runtime.v1.RuntimeService/RemoveContainer
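
	Two things stand out in the CRI-O excerpt: the gadget container keeps exiting with status 1 and being recreated (attempt 6 in the status table below), and metrics-server is stopped and its sandbox torn down at 10:44:59, consistent with the metrics-server test failure in this run. A sketch for digging into a crash-looping container from the node (e.g., after minikube ssh); the container-id placeholder is illustrative:

	    # Sketch: inspect a crash-looping container with crictl from the node.
	    sudo crictl ps -a --name gadget              # list all attempts, including exited ones
	    sudo crictl logs --tail 100 <container-id>   # output from just before the status-1 exit
	    sudo crictl inspect <container-id>           # exit code, finishedAt, restart metadata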
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                                                        CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	bcf51d70eaf49       ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec                            2 minutes ago       Exited              gadget                                   6                   cf56dfeabe5de       gadget-hx2qq
	3b30e9b80217f       registry.k8s.io/sig-storage/csi-snapshotter@sha256:291334908ddf71a4661fd7f6d9d97274de8a5378a2b6fdfeb2ce73414a34f82f                          7 minutes ago       Running             csi-snapshotter                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	66246ecfc47d6       gcr.io/k8s-minikube/gcp-auth-webhook@sha256:a40e1a121ee367d1712ac3a54ec9c38c405a65dde923c98e5fa6368fa82c4b69                                 7 minutes ago       Running             gcp-auth                                 0                   b09347ee3cb04       gcp-auth-89d5ffd79-j2ckg
	5dabae8faaade       registry.k8s.io/sig-storage/csi-provisioner@sha256:98ffd09c0784203d200e0f8c241501de31c8df79644caac7eed61bd6391e5d49                          7 minutes ago       Running             csi-provisioner                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	63d680209bdeb       registry.k8s.io/sig-storage/livenessprobe@sha256:8b00c6e8f52639ed9c6f866085893ab688e57879741b3089e3cfa9998502e158                            7 minutes ago       Running             liveness-probe                           0                   32259548d9366       csi-hostpathplugin-zrlmd
	b241211876358       registry.k8s.io/sig-storage/hostpathplugin@sha256:7b1dfc90a367222067fc468442fdf952e20fc5961f25c1ad654300ddc34d7083                           7 minutes ago       Running             hostpath                                 0                   32259548d9366       csi-hostpathplugin-zrlmd
	ab8eaedf8040a       registry.k8s.io/sig-storage/csi-node-driver-registrar@sha256:511b8c8ac828194a753909d26555ff08bc12f497dd8daeb83fe9d593693a26c1                7 minutes ago       Running             node-driver-registrar                    0                   32259548d9366       csi-hostpathplugin-zrlmd
	331ea01abf2ed       registry.k8s.io/ingress-nginx/controller@sha256:22f9d129ae8c89a2cabbd13af3c1668944f3dd68fec186199b7024a0a2fc75b3                             7 minutes ago       Running             controller                               0                   549ac22ef6389       ingress-nginx-controller-bc57996ff-jgfjf
	5e5f91a726842       docker.io/rancher/local-path-provisioner@sha256:689a2489a24e74426e4a4666e611c988202c5fa995908b0c60133aca3eb87d98                             7 minutes ago       Running             local-path-provisioner                   0                   0d353b19ef8b9       local-path-provisioner-86d989889c-b652d
	9773c25a0a3dc       gcr.io/cloud-spanner-emulator/emulator@sha256:41ec188288c7943f488600462b2b74002814e52439be82d15de33c3ee4898a58                               7 minutes ago       Running             cloud-spanner-emulator                   0                   32e89c2c5a56d       cloud-spanner-emulator-769b77f747-qvhhc
	3d28641a10686       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              patch                                    0                   ae4e1f0886d62       ingress-nginx-admission-patch-5hvnf
	98ee5c554b6be       registry.k8s.io/sig-storage/csi-external-health-monitor-controller@sha256:80b9ba94aa2afe24553d69bd165a6a51552d1582d68618ec00d3b804a7d9193c   7 minutes ago       Running             csi-external-health-monitor-controller   0                   32259548d9366       csi-hostpathplugin-zrlmd
	11f6f0bf554a7       registry.k8s.io/sig-storage/csi-resizer@sha256:425d8f1b769398127767b06ed97ce62578a3179bcb99809ce93a1649e025ffe7                              7 minutes ago       Running             csi-resizer                              0                   b35d742443216       csi-hostpath-resizer-0
	b9e189d1acd4c       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              create                                   0                   ef2a1639e8386       ingress-nginx-admission-create-kmjkm
	fe7a31fb7fe71       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   54dbfb69eabc4       snapshot-controller-56fcc65765-5th26
	082cee4b81438       docker.io/marcnuri/yakd@sha256:1c961556224d57fc747de0b1874524208e5fb4f8386f23e9c1c4c18e97109f17                                              7 minutes ago       Running             yakd                                     0                   91ccf72a05daa       yakd-dashboard-67d98fc6b-ztsj8
	4cf01aeaccd3f       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   29551751a8a3b       snapshot-controller-56fcc65765-fjrw9
	d5f8b279203cd       nvcr.io/nvidia/k8s-device-plugin@sha256:cdd05f9d89f0552478d46474005e86b98795ad364664f644225b99d94978e680                                     7 minutes ago       Running             nvidia-device-plugin-ctr                 0                   eda9663f4feb4       nvidia-device-plugin-daemonset-6j9gc
	d50b4977768d7       registry.k8s.io/sig-storage/csi-attacher@sha256:4b5609c78455de45821910065281a368d5f760b41250f90cbde5110543bdc326                             8 minutes ago       Running             csi-attacher                             0                   6f989f68a9599       csi-hostpath-attacher-0
	198a1da1f3633       gcr.io/k8s-minikube/minikube-ingress-dns@sha256:4211a1de532376c881851542238121b26792225faa36a7b02dccad88fd05797c                             8 minutes ago       Running             minikube-ingress-dns                     0                   69f4c5e690a85       kube-ingress-dns-minikube
	ee934dc9f4f92       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                                             8 minutes ago       Running             coredns                                  0                   d4b44085e648e       coredns-7c65d6cfc9-r6x6b
	2a862ef326432       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                                             8 minutes ago       Running             storage-provisioner                      0                   e168c388c9d11       storage-provisioner
	8d59e894feca0       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                                             8 minutes ago       Running             kindnet-cni                              0                   ca9fcc6465180       kindnet-wv5d6
	6200eb5cfcd24       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                                             9 minutes ago       Running             kube-proxy                               0                   a491da0967548       kube-proxy-6zqlq
	2b161087caf5a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                                             9 minutes ago       Running             kube-scheduler                           0                   c99e3a64f4ade       kube-scheduler-addons-936355
	4ee66eef50ab6       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                                             9 minutes ago       Running             kube-controller-manager                  0                   70ee024a23a5b       kube-controller-manager-addons-936355
	f911db1ed55bb       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                                                             9 minutes ago       Running             kube-apiserver                           0                   fe5dcd273af65       kube-apiserver-addons-936355
	3b247261f15f4       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                                             9 minutes ago       Running             etcd                                     0                   24ef782ab6be4       etcd-addons-936355
	
	
	==> coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] <==
	[INFO] 10.244.0.6:41410 - 64521 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000074082s
	[INFO] 10.244.0.6:32998 - 54705 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002300443s
	[INFO] 10.244.0.6:32998 - 29583 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002118081s
	[INFO] 10.244.0.6:57466 - 59415 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000171687s
	[INFO] 10.244.0.6:57466 - 26377 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000100124s
	[INFO] 10.244.0.6:57769 - 49607 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000092608s
	[INFO] 10.244.0.6:57769 - 14275 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.00007117s
	[INFO] 10.244.0.6:44055 - 7650 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000056876s
	[INFO] 10.244.0.6:44055 - 57820 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000036053s
	[INFO] 10.244.0.6:42734 - 36918 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000045283s
	[INFO] 10.244.0.6:42734 - 61736 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000033599s
	[INFO] 10.244.0.6:54338 - 26081 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001812112s
	[INFO] 10.244.0.6:54338 - 40423 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001769553s
	[INFO] 10.244.0.6:39094 - 56002 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000048811s
	[INFO] 10.244.0.6:39094 - 9935 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000036265s
	[INFO] 10.244.0.20:53754 - 1366 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.0016936s
	[INFO] 10.244.0.20:51144 - 45189 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.002085581s
	[INFO] 10.244.0.20:60186 - 495 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000131541s
	[INFO] 10.244.0.20:58173 - 47948 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000146031s
	[INFO] 10.244.0.20:41557 - 45319 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000125396s
	[INFO] 10.244.0.20:60168 - 27262 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00012213s
	[INFO] 10.244.0.20:55951 - 7020 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.004693068s
	[INFO] 10.244.0.20:46529 - 17954 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.005735124s
	[INFO] 10.244.0.20:54136 - 20848 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001853292s
	[INFO] 10.244.0.20:59146 - 51848 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.0025582s
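
Note: the NXDOMAIN answers above are not failures. The pod resolver runs with ndots:5, so a name such as registry.kube-system.svc.cluster.local is expanded through every search domain (kube-system.svc.cluster.local, svc.cluster.local, cluster.local, us-east-2.compute.internal) before the literal name is tried, and each candidate shows up as one query in the log. A minimal Go sketch of that expansion, with the search list read off the queries above (the exact ordering is an assumption):

	package main

	import "fmt"

	// expand mirrors the resolver's search-path behaviour: every search
	// domain is appended first, then the literal name is tried last.
	func expand(name string, search []string) []string {
		out := make([]string, 0, len(search)+1)
		for _, s := range search {
			out = append(out, name+"."+s+".")
		}
		return append(out, name+".")
	}

	func main() {
		search := []string{
			"kube-system.svc.cluster.local",
			"svc.cluster.local",
			"cluster.local",
			"us-east-2.compute.internal",
		}
		// Each candidate corresponds to one A/AAAA query in the log above.
		for _, q := range expand("registry.kube-system.svc.cluster.local", search) {
			fmt.Println(q)
		}
	}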
	
	
	==> describe nodes <==
	Name:               addons-936355
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-936355
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-936355
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-936355
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-936355"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:35:49 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-936355
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:44:54 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:36:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-936355
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d04f59375248444681829ec487634926
	  System UUID:                65d15a11-4f3c-4207-941c-6a3b096d7c27
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (21 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-qvhhc     0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m1s
	  gadget                      gadget-hx2qq                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m59s
	  gcp-auth                    gcp-auth-89d5ffd79-j2ckg                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m53s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-jgfjf    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         8m59s
	  kube-system                 coredns-7c65d6cfc9-r6x6b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     9m3s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m58s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m58s
	  kube-system                 csi-hostpathplugin-zrlmd                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m19s
	  kube-system                 etcd-addons-936355                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         9m9s
	  kube-system                 kindnet-wv5d6                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      9m3s
	  kube-system                 kube-apiserver-addons-936355                250m (12%)    0 (0%)      0 (0%)           0 (0%)         9m10s
	  kube-system                 kube-controller-manager-addons-936355       200m (10%)    0 (0%)      0 (0%)           0 (0%)         9m9s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m
	  kube-system                 kube-proxy-6zqlq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m4s
	  kube-system                 kube-scheduler-addons-936355                100m (5%)     0 (0%)      0 (0%)           0 (0%)         9m9s
	  kube-system                 nvidia-device-plugin-daemonset-6j9gc        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m19s
	  kube-system                 snapshot-controller-56fcc65765-5th26        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m58s
	  kube-system                 snapshot-controller-56fcc65765-fjrw9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m58s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m59s
	  local-path-storage          local-path-provisioner-86d989889c-b652d     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m59s
	  yakd-dashboard              yakd-dashboard-67d98fc6b-ztsj8              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     8m59s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             438Mi (5%)  476Mi (6%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 8m58s                  kube-proxy       
	  Normal   NodeHasSufficientMemory  9m16s (x8 over 9m17s)  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m16s (x8 over 9m17s)  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m16s (x7 over 9m17s)  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   Starting                 9m10s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 9m10s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  9m9s                   kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m9s                   kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m9s                   kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           9m5s                   node-controller  Node addons-936355 event: Registered Node addons-936355 in Controller
	  Normal   NodeReady                8m19s                  kubelet          Node addons-936355 status is now: NodeReady
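
Note: the Allocated resources figures are just column sums of the pod table above. The non-zero CPU requests are 100m + 100m + 100m + 100m + 250m + 200m + 100m = 950m, and 950m of the node's 2 CPUs (2000m) is 47% after kubectl's integer truncation of 47.5%. A quick Go check of that arithmetic:

	package main

	import "fmt"

	func main() {
		// Non-zero CPU requests (millicores) from the pod table above.
		requests := []int{100, 100, 100, 100, 250, 200, 100}
		total := 0
		for _, r := range requests {
			total += r
		}
		capacity := 2000 // node capacity: 2 CPUs = 2000m
		fmt.Printf("%dm (%d%%)\n", total, total*100/capacity) // prints: 950m (47%)
	}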
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] <==
	{"level":"warn","ts":"2024-09-16T10:35:59.461521Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"173.42953ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/serviceaccounts\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:35:59.461548Z","caller":"traceutil/trace.go:171","msg":"trace[825824076] range","detail":"{range_begin:/registry/serviceaccounts; range_end:; response_count:0; response_revision:365; }","duration":"173.471351ms","start":"2024-09-16T10:35:59.288071Z","end":"2024-09-16T10:35:59.461542Z","steps":["trace[825824076] 'agreement among raft nodes before linearized reading'  (duration: 173.394077ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910299Z","caller":"traceutil/trace.go:171","msg":"trace[571848] transaction","detail":"{read_only:false; response_revision:372; number_of_response:1; }","duration":"101.485416ms","start":"2024-09-16T10:35:59.808786Z","end":"2024-09-16T10:35:59.910272Z","steps":["trace[571848] 'process raft request'  (duration: 72.962753ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910551Z","caller":"traceutil/trace.go:171","msg":"trace[2049811000] transaction","detail":"{read_only:false; response_revision:373; number_of_response:1; }","duration":"101.622964ms","start":"2024-09-16T10:35:59.808918Z","end":"2024-09-16T10:35:59.910541Z","steps":["trace[2049811000] 'process raft request'  (duration: 72.910972ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910806Z","caller":"traceutil/trace.go:171","msg":"trace[1901548869] transaction","detail":"{read_only:false; response_revision:374; number_of_response:1; }","duration":"101.844209ms","start":"2024-09-16T10:35:59.808954Z","end":"2024-09-16T10:35:59.910798Z","steps":["trace[1901548869] 'process raft request'  (duration: 72.897089ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945205Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.325816ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:35:59.945344Z","caller":"traceutil/trace.go:171","msg":"trace[1851060564] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:375; }","duration":"136.4823ms","start":"2024-09-16T10:35:59.808847Z","end":"2024-09-16T10:35:59.945330Z","steps":["trace[1851060564] 'agreement among raft nodes before linearized reading'  (duration: 136.289206ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945577Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.842881ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods/kube-system/kube-proxy-6zqlq\" ","response":"range_response_count:1 size:4833"}
	{"level":"info","ts":"2024-09-16T10:35:59.945682Z","caller":"traceutil/trace.go:171","msg":"trace[840636989] range","detail":"{range_begin:/registry/pods/kube-system/kube-proxy-6zqlq; range_end:; response_count:1; response_revision:375; }","duration":"136.945081ms","start":"2024-09-16T10:35:59.808725Z","end":"2024-09-16T10:35:59.945670Z","steps":["trace[840636989] 'agreement among raft nodes before linearized reading'  (duration: 136.808125ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.052890Z","caller":"traceutil/trace.go:171","msg":"trace[935433541] transaction","detail":"{read_only:false; response_revision:376; number_of_response:1; }","duration":"171.425064ms","start":"2024-09-16T10:35:59.881432Z","end":"2024-09-16T10:36:00.052857Z","steps":["trace[935433541] 'process raft request'  (duration: 103.374661ms)","trace[935433541] 'compare'  (duration: 67.370586ms)"],"step_count":2}
	{"level":"info","ts":"2024-09-16T10:36:00.053251Z","caller":"traceutil/trace.go:171","msg":"trace[1640692462] linearizableReadLoop","detail":"{readStateIndex:386; appliedIndex:385; }","duration":"171.181083ms","start":"2024-09-16T10:35:59.882059Z","end":"2024-09-16T10:36:00.053240Z","steps":["trace[1640692462] 'read index received'  (duration: 86.984477ms)","trace[1640692462] 'applied index is now lower than readState.Index'  (duration: 84.173345ms)"],"step_count":2}
	{"level":"warn","ts":"2024-09-16T10:36:00.082458Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"201.082081ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/specs/default/cloud-spanner-emulator\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.131166Z","caller":"traceutil/trace.go:171","msg":"trace[1850329919] range","detail":"{range_begin:/registry/services/specs/default/cloud-spanner-emulator; range_end:; response_count:0; response_revision:379; }","duration":"249.789246ms","start":"2024-09-16T10:35:59.881352Z","end":"2024-09-16T10:36:00.131141Z","steps":["trace[1850329919] 'agreement among raft nodes before linearized reading'  (duration: 201.06412ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.081165Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"179.917297ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/apiextensions.k8s.io/customresourcedefinitions\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.139019Z","caller":"traceutil/trace.go:171","msg":"trace[179530847] range","detail":"{range_begin:/registry/apiextensions.k8s.io/customresourcedefinitions; range_end:; response_count:0; response_revision:377; }","duration":"257.611311ms","start":"2024-09-16T10:35:59.881381Z","end":"2024-09-16T10:36:00.138992Z","steps":["trace[179530847] 'agreement among raft nodes before linearized reading'  (duration: 179.875904ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.497778Z","caller":"traceutil/trace.go:171","msg":"trace[781374587] transaction","detail":"{read_only:false; response_revision:383; number_of_response:1; }","duration":"244.927422ms","start":"2024-09-16T10:36:00.252822Z","end":"2024-09-16T10:36:00.497749Z","steps":["trace[781374587] 'process raft request'  (duration: 240.255139ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498033Z","caller":"traceutil/trace.go:171","msg":"trace[2049862755] transaction","detail":"{read_only:false; response_revision:384; number_of_response:1; }","duration":"245.149988ms","start":"2024-09-16T10:36:00.252873Z","end":"2024-09-16T10:36:00.498023Z","steps":["trace[2049862755] 'process raft request'  (duration: 243.936212ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498257Z","caller":"traceutil/trace.go:171","msg":"trace[1308392049] transaction","detail":"{read_only:false; response_revision:385; number_of_response:1; }","duration":"245.371382ms","start":"2024-09-16T10:36:00.252875Z","end":"2024-09-16T10:36:00.498247Z","steps":["trace[1308392049] 'process raft request'  (duration: 243.967662ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498461Z","caller":"traceutil/trace.go:171","msg":"trace[1919696831] transaction","detail":"{read_only:false; response_revision:386; number_of_response:1; }","duration":"245.376936ms","start":"2024-09-16T10:36:00.253076Z","end":"2024-09-16T10:36:00.498453Z","steps":["trace[1919696831] 'process raft request'  (duration: 243.813828ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.508772Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"105.013609ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/namespaces/kube-system\" ","response":"range_response_count:1 size:351"}
	{"level":"info","ts":"2024-09-16T10:36:00.508863Z","caller":"traceutil/trace.go:171","msg":"trace[1394685121] range","detail":"{range_begin:/registry/namespaces/kube-system; range_end:; response_count:1; response_revision:394; }","duration":"105.116096ms","start":"2024-09-16T10:36:00.403731Z","end":"2024-09-16T10:36:00.508847Z","steps":["trace[1394685121] 'agreement among raft nodes before linearized reading'  (duration: 104.925356ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.510822Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"106.978552ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/ranges/serviceips\" ","response":"range_response_count:1 size:116"}
	{"level":"info","ts":"2024-09-16T10:36:00.510873Z","caller":"traceutil/trace.go:171","msg":"trace[90254389] range","detail":"{range_begin:/registry/ranges/serviceips; range_end:; response_count:1; response_revision:394; }","duration":"107.038374ms","start":"2024-09-16T10:36:00.403822Z","end":"2024-09-16T10:36:00.510860Z","steps":["trace[90254389] 'agreement among raft nodes before linearized reading'  (duration: 106.927616ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.513542Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"109.80734ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:36:00.513613Z","caller":"traceutil/trace.go:171","msg":"trace[1039894144] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:395; }","duration":"109.886707ms","start":"2024-09-16T10:36:00.403712Z","end":"2024-09-16T10:36:00.513599Z","steps":["trace[1039894144] 'agreement among raft nodes before linearized reading'  (duration: 109.778304ms)"],"step_count":1}
	
	
	==> gcp-auth [66246ecfc47d65d522c45cff2baf15e2433dc0e0681c400a1437f7890b27b5b4] <==
	2024/09/16 10:37:52 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:45:01 up 10:27,  0 users,  load average: 0.77, 0.87, 1.65
	Linux addons-936355 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] <==
	I0916 10:42:52.020927       1 main.go:299] handling current node
	I0916 10:43:02.017772       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:02.017806       1 main.go:299] handling current node
	I0916 10:43:12.017271       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:12.017425       1 main.go:299] handling current node
	I0916 10:43:22.020176       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:22.020291       1 main.go:299] handling current node
	I0916 10:43:32.018190       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:32.018318       1 main.go:299] handling current node
	I0916 10:43:42.017224       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:42.017265       1 main.go:299] handling current node
	I0916 10:43:52.020877       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:52.021004       1 main.go:299] handling current node
	I0916 10:44:02.018165       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:02.018203       1 main.go:299] handling current node
	I0916 10:44:12.017210       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:12.017340       1 main.go:299] handling current node
	I0916 10:44:22.017201       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:22.017236       1 main.go:299] handling current node
	I0916 10:44:32.025659       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:32.025697       1 main.go:299] handling current node
	I0916 10:44:42.017259       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:42.017354       1 main.go:299] handling current node
	I0916 10:44:52.019299       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:52.019335       1 main.go:299] handling current node
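
Note: kindnet logs the same pair of lines every ~10s because it reconciles routes for all known nodes on a fixed interval, and this cluster has only the one node. A minimal sketch of that ticker loop; the 10s interval is inferred from the timestamps above, not taken from kindnet's source:

	package main

	import (
		"log"
		"time"
	)

	func main() {
		nodes := map[string]struct{}{"192.168.49.2": {}} // the single node above
		for range time.Tick(10 * time.Second) {
			for ip := range nodes {
				log.Printf("Handling node with IPs: map[%s:{}]", ip)
				log.Println("handling current node")
			}
		}
	}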
	
	
	==> kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] <==
	I0916 10:37:03.362849       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	E0916 10:38:17.001292       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: connect: connection refused" logger="UnhandledError"
	W0916 10:38:17.001510       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:17.001593       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	W0916 10:38:18.005200       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005260       1 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1beta1.metrics.k8s.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError"
	W0916 10:38:18.005310       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005375       1 controller.go:102] "Unhandled Error" err=<
		loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:18.006688       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:38:18.006752       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	W0916 10:38:22.012823       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:22.012831       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: i/o timeout" logger="UnhandledError"
	E0916 10:38:22.012998       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:22.050506       1 handler.go:286] Adding GroupVersion metrics.k8s.io v1beta1 to ResourceManager
	E0916 10:38:22.062342       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: Operation cannot be fulfilled on apiservices.apiregistration.k8s.io \"v1beta1.metrics.k8s.io\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:45:01.609832       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	
	
	==> kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] <==
	I0916 10:37:40.457925       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="58.632µs"
	I0916 10:37:42.479801       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.509000       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.572015       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.515757       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.524004       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.530235       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:45.150271       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:52.558234       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="22.364743ms"
	I0916 10:37:52.559132       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="31.351µs"
	I0916 10:37:54.619407       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:54.659736       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="16.445353ms"
	I0916 10:37:54.660858       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="68.076µs"
	E0916 10:37:56.423390       1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: stale GroupVersion discovery: metrics.k8s.io/v1beta1" logger="UnhandledError"
	I0916 10:37:56.899634       1 garbagecollector.go:826] "failed to discover some groups" logger="garbage-collector-controller" groups="<internal error: json: unsupported type: map[schema.GroupVersion]error>"
	I0916 10:37:58.024462       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:37:58.060468       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:38:14.019749       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:14.050322       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:16.992451       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="19.424064ms"
	I0916 10:38:16.993500       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="50.764µs"
	I0916 10:38:25.225399       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:39:05.149618       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.223µs"
	I0916 10:43:30.185523       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:44:59.000560       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="4.677µs"
	
	
	==> kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] <==
	I0916 10:36:01.688812       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:36:02.265241       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:36:02.271591       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:36:02.423456       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:36:02.423579       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:36:02.431736       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:36:02.432160       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:36:02.432351       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:36:02.433544       1 config.go:199] "Starting service config controller"
	I0916 10:36:02.433620       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:36:02.433682       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:36:02.433713       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:36:02.434194       1 config.go:328] "Starting node config controller"
	I0916 10:36:02.434243       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:36:02.545223       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:36:02.585616       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:36:02.585634       1 shared_informer.go:320] Caches are synced for endpoint slice config
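
Note: the route_localnet line above is kube-proxy's iptables mode enabling NodePort connections on loopback addresses; the setting lands in a sysctl that can be read back from procfs. A minimal Go check (the procfs path is the standard location for that sysctl):

	package main

	import (
		"fmt"
		"log"
		"os"
		"strings"
	)

	func main() {
		// kube-proxy sets this to 1 (see the log line above); reading it
		// back confirms the sysctl took effect.
		b, err := os.ReadFile("/proc/sys/net/ipv4/conf/all/route_localnet")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println("route_localnet =", strings.TrimSpace(string(b)))
	}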
	
	
	==> kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] <==
	W0916 10:35:50.291598       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:35:50.291652       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291738       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291810       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291911       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291966       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292090       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:35:50.292141       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292276       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292635       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292342       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:35:50.292669       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292396       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:35:50.292714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292436       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:35:50.292743       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292494       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:35:50.292771       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292533       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:35:50.292790       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292814       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292916       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:35:50.292984       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	I0916 10:35:51.479680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
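
Note: the burst of "forbidden" errors above is the usual scheduler start-up race: its informers begin listing before the kube-scheduler RBAC bindings have propagated, the reflector keeps retrying, and the final "Caches are synced" line shows the race resolving. A minimal sketch of that retry-until-synced pattern; the backoff numbers are illustrative:

	package main

	import (
		"errors"
		"fmt"
		"time"
	)

	var errForbidden = errors.New("pods is forbidden") // stands in for the RBAC race

	// listUntilSynced retries a failing List with exponential backoff,
	// the way the reflector keeps retrying until permissions land.
	func listUntilSynced(list func() error) {
		backoff := 100 * time.Millisecond
		for attempt := 1; ; attempt++ {
			if err := list(); err == nil {
				fmt.Println("caches synced after", attempt, "attempts")
				return
			}
			time.Sleep(backoff)
			backoff *= 2
		}
	}

	func main() {
		calls := 0
		listUntilSynced(func() error {
			calls++
			if calls < 3 {
				return errForbidden // forbidden until RBAC propagates
			}
			return nil
		})
	}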
	
	
	==> kubelet <==
	Sep 16 10:45:00 addons-936355 kubelet[1507]: I0916 10:45:00.616407    1507 scope.go:117] "RemoveContainer" containerID="b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410"
	Sep 16 10:45:00 addons-936355 kubelet[1507]: I0916 10:45:00.705676    1507 scope.go:117] "RemoveContainer" containerID="b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410"
	Sep 16 10:45:00 addons-936355 kubelet[1507]: E0916 10:45:00.735650    1507 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410\": container with ID starting with b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410 not found: ID does not exist" containerID="b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410"
	Sep 16 10:45:00 addons-936355 kubelet[1507]: I0916 10:45:00.735702    1507 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410"} err="failed to get container status \"b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410\": rpc error: code = NotFound desc = could not find container \"b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410\": container with ID starting with b65d0d4cafeecf004f5ab649f5343b72888fe4317e08a1f161b35e6e17844410 not found: ID does not exist"
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827766    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-cgroup\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827817    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827850    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827886    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkfcs\" (UniqueName: \"kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827909    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827932    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.827951    1507 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run\") pod \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\" (UID: \"fb6217d4-dbed-40c2-b47e-4342cb3f94b1\") "
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828069    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run" (OuterVolumeSpecName: "run") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828119    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-cgroup" (OuterVolumeSpecName: "cgroup") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "cgroup". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828162    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs" (OuterVolumeSpecName: "debugfs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "debugfs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.828180    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host" (OuterVolumeSpecName: "host") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.829456    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs" (OuterVolumeSpecName: "bpffs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "bpffs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.829526    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules" (OuterVolumeSpecName: "modules") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "modules". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.830432    1507 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs" (OuterVolumeSpecName: "kube-api-access-lkfcs") pod "fb6217d4-dbed-40c2-b47e-4342cb3f94b1" (UID: "fb6217d4-dbed-40c2-b47e-4342cb3f94b1"). InnerVolumeSpecName "kube-api-access-lkfcs". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929150    1507 reconciler_common.go:288] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-host\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929195    1507 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-lkfcs\" (UniqueName: \"kubernetes.io/projected/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-kube-api-access-lkfcs\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929207    1507 reconciler_common.go:288] "Volume detached for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-bpffs\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929217    1507 reconciler_common.go:288] "Volume detached for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-modules\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929229    1507 reconciler_common.go:288] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-run\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929238    1507 reconciler_common.go:288] "Volume detached for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-cgroup\") on node \"addons-936355\" DevicePath \"\""
	Sep 16 10:45:01 addons-936355 kubelet[1507]: I0916 10:45:01.929246    1507 reconciler_common.go:288] "Volume detached for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/fb6217d4-dbed-40c2-b47e-4342cb3f94b1-debugfs\") on node \"addons-936355\" DevicePath \"\""
	
	
	==> storage-provisioner [2a862ef326432a5d0293f9317e2a22cc3bbc0e787dab4595749d403d11fd2627] <==
	I0916 10:36:43.471506       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:36:43.494873       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:36:43.495065       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:36:43.512818       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:36:43.513129       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	I0916 10:36:43.520230       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"89d79315-71d3-40c0-aeb5-687aa54390d8", APIVersion:"v1", ResourceVersion:"938", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8 became leader
	I0916 10:36:43.613923       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-936355 -n addons-936355
helpers_test.go:261: (dbg) Run:  kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (760.899µs)
helpers_test.go:263: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/MetricsServer (354.78s)
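
Note: every kubectl invocation in this report fails the same way: "fork/exec /usr/local/bin/kubectl: exec format error" is execve returning ENOEXEC, meaning the binary at that path is not an arm64 executable (most plausibly an amd64 kubectl installed on this arm64 host). A minimal Go check of the binary's ELF architecture, with the path taken from the error message:

	package main

	import (
		"debug/elf"
		"fmt"
		"log"
	)

	func main() {
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		// EM_AARCH64 runs natively on this arm64 host; anything else
		// (e.g. EM_X86_64) reproduces the "exec format error" above.
		fmt.Println("ELF machine:", f.Machine)
	}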

x
+
TestAddons/parallel/CSI (362.56s)

=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI

=== CONT  TestAddons/parallel/CSI
addons_test.go:567: csi-hostpath-driver pods stabilized in 30.731188ms
addons_test.go:570: (dbg) Run:  kubectl --context addons-936355 create -f testdata/csi-hostpath-driver/pvc.yaml
addons_test.go:570: (dbg) Non-zero exit: kubectl --context addons-936355 create -f testdata/csi-hostpath-driver/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (277.917µs)
addons_test.go:572: creating sample PVC with kubectl --context addons-936355 create -f testdata/csi-hostpath-driver/pvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:575: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
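Note: the repeated get-pvc attempts that follow come from a polling helper that retries until the PVC reaches the wanted phase or the 6m0s budget runs out. A minimal sketch of such a wait loop; the 2s interval and the helper name are assumptions, only the 6m0s timeout comes from the line above:

	package main

	import (
		"fmt"
		"os/exec"
		"time"
	)

	func waitForPVCPhase(kubectx, ns, name, want string, timeout time.Duration) error {
		deadline := time.Now().Add(timeout)
		for time.Now().Before(deadline) {
			out, err := exec.Command("kubectl", "--context", kubectx,
				"get", "pvc", name, "-o", "jsonpath={.status.phase}", "-n", ns).Output()
			if err == nil && string(out) == want {
				return nil
			}
			time.Sleep(2 * time.Second) // retry until the phase matches or we time out
		}
		return fmt.Errorf("pvc %s/%s never reached phase %q", ns, name, want)
	}

	func main() {
		err := waitForPVCPhase("addons-936355", "default", "hpvc", "Bound", 6*time.Minute)
		if err != nil {
			fmt.Println(err)
		}
	}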
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (165.304µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (595.775µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (522.276µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394-396: [output elided: the Run / Non-zero exit / WARNING triple above repeats ~130 more times while waiting 6m0s for pvc "hpvc"; every attempt fails within ~15ms with the same "fork/exec /usr/local/bin/kubectl: exec format error"]
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (341.931µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (357.964µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (869.655µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (400.137µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (582.196µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (364.684µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (406.742µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (379.173µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (328.41µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (376.712µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (383.588µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (689.803µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (521.439µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (392.588µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (488.29µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (484.78µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (426.254µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (527.149µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (509.525µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (623.811µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (349.513µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (484.451µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (477.854µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (451.631µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (489.899µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (360.319µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (372.495µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (377.508µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (377.533µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (502.42µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (577.019µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (371.789µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (407.144µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (533.238µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (385.541µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (352.787µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (370.575µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (454.774µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (381.143µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (371.091µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (342.013µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (400.753µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (485.707µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (450.688µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (340.266µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (458.417µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (479.356µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (512.955µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (452.443µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (348.299µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (363.117µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (459.705µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (444.599µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (549.532µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (431.455µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (445.552µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (470.896µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (576.346µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (359.45µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (516.663µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (391.883µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (553.381µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (516.606µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.961µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (507.097µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (486.24µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (348.348µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (481.825µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (583.386µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (425.917µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (436.592µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (528.618µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (403.23µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (598.786µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (606.597µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (531.679µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (556.392µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (396.379µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (490.785µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (538.563µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (344.385µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (599.935µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (563.308µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (366.186µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (332.774µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (480.086µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (539.366µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (354.165µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (407.908µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (437.749µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (464.915µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (421.323µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (14.442307ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (572.252µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (666.846µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (420.838µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (614.31µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (542.435µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (402.697µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (496.93µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (501.14µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (351.983µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (478.765µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (507.047µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (525.254µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (445.822µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (458.794µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (355.191µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (351.392µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (519.65µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (420.387µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (351.17µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
[... the identical Run / Non-zero exit / WARNING triplet repeats for 113 further attempts, every kubectl invocation failing with "fork/exec /usr/local/bin/kubectl: exec format error" in roughly 0.35-0.77ms, until the wait deadline below ...]
helpers_test.go:394: (dbg) Run:  kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-936355 get pvc hpvc -o jsonpath={.status.phase} -n default: context deadline exceeded (1.386µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: context deadline exceeded
addons_test.go:576: failed waiting for PVC hpvc: context deadline exceeded
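Each cycle above is the same poll-until-deadline pattern: the helper re-runs the kubectl phase check, logs the non-zero exit as a WARNING, and retries until either the PVC reports a phase or the surrounding context expires, which is why the sequence ends in "context deadline exceeded" rather than a kubectl error. A minimal Go sketch of that loop follows; checkPVCPhase, the 2-second interval, and the 6-minute budget are illustrative assumptions, not the actual helpers_test.go implementation.

	// Poll-until-deadline sketch matching the log shape above. All names and
	// timings here are assumptions for illustration, not minikube's helper API.
	package main

	import (
		"context"
		"fmt"
		"os/exec"
		"time"
	)

	func checkPVCPhase(ctx context.Context) (string, error) {
		out, err := exec.CommandContext(ctx, "kubectl",
			"--context", "addons-936355",
			"get", "pvc", "hpvc", "-n", "default",
			"-o", "jsonpath={.status.phase}").Output()
		return string(out), err
	}

	func main() {
		ctx, cancel := context.WithTimeout(context.Background(), 6*time.Minute)
		defer cancel()
		for {
			phase, err := checkPVCPhase(ctx)
			if err == nil && phase == "Bound" {
				fmt.Println("PVC hpvc is Bound")
				return
			}
			fmt.Printf("WARNING: PVC get returned: %v\n", err)
			select {
			case <-ctx.Done():
				// Mirrors the final log lines: the deadline, not kubectl, ends the wait.
				fmt.Println("failed waiting for PVC hpvc:", ctx.Err())
				return
			case <-time.After(2 * time.Second):
			}
		}
	}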
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/CSI]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-936355
helpers_test.go:235: (dbg) docker inspect addons-936355:

-- stdout --
	[
	    {
	        "Id": "990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22",
	        "Created": "2024-09-16T10:35:26.829229764Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1385081,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:35:26.979651686Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hostname",
	        "HostsPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/hosts",
	        "LogPath": "/var/lib/docker/containers/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22/990f1d352091220982d3e72266c05a58085b58b6631f82700a66decf59d84c22-json.log",
	        "Name": "/addons-936355",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-936355:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-936355",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/merged",
	                "UpperDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/diff",
	                "WorkDir": "/var/lib/docker/overlay2/c77d59ded00fa56b49dc4ec025d7a90bf6cdbcc44e193db0ee5c49a540e58e7c/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-936355",
	                "Source": "/var/lib/docker/volumes/addons-936355/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-936355",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-936355",
	                "name.minikube.sigs.k8s.io": "addons-936355",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "c25ad70fe630d4f698b2829da4e56bff2645b3ff549ca5302800a382e6bdd028",
	            "SandboxKey": "/var/run/docker/netns/c25ad70fe630",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34603"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34604"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34607"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34605"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34606"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-936355": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "5d73edaa3366fd0ba0b4bacad454985b0bd272fda9938fc527483e0046d7c748",
	                    "EndpointID": "cf4cd538acb5e979612a79c60d294fba1f05c9fef1a1bec978977fcb945819c4",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-936355",
	                        "990f1d352091"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
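For anyone replaying this post-mortem by hand, the inspect output above can be queried field-by-field rather than dumped whole; a minimal sketch, assuming the addons-936355 container from this run still exists (the Go template is the same one minikube itself uses later in this log):

    # Look up the host port mapped to the container's SSH port (22/tcp).
    docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' addons-936355
    # For this run the mapping was 127.0.0.1:34603.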
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-936355 -n addons-936355
helpers_test.go:244: <<< TestAddons/parallel/CSI FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/CSI]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 logs -n 25: (1.57658138s)
helpers_test.go:252: TestAddons/parallel/CSI logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-084128              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| start   | -o=json --download-only              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-605096              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128              | download-only-084128   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| delete  | -p download-only-605096              | download-only-605096   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | download-docker-880503               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p download-docker-880503            | download-docker-880503 | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| start   | --download-only -p                   | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | binary-mirror-652159                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:40363               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-652159              | binary-mirror-652159   | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:35 UTC |
	| addons  | enable dashboard -p                  | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                 | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC |                     |
	|         | addons-936355                        |                        |         |         |                     |                     |
	| start   | -p addons-936355 --wait=true         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:35 UTC | 16 Sep 24 10:38 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=crio             |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| ip      | addons-936355 ip                     | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	| addons  | addons-936355 addons disable         | addons-936355          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
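The multi-row `start` entry above is one invocation wrapped across table cells; reassembled as a single command line (flags exactly as logged, order preserved):

    out/minikube-linux-arm64 start -p addons-936355 --wait=true \
      --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server \
      --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth \
      --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher \
      --addons=nvidia-device-plugin --addons=yakd --addons=volcano \
      --driver=docker --container-runtime=crio --addons=ingress --addons=ingress-dns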
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:35:01
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:35:01.861741 1384589 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:35:01.861923 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.861959 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:35:01.861972 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:35:01.862230 1384589 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:35:01.862730 1384589 out.go:352] Setting JSON to false
	I0916 10:35:01.863665 1384589 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37047,"bootTime":1726445855,"procs":155,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:35:01.863739 1384589 start.go:139] virtualization:  
	I0916 10:35:01.866923 1384589 out.go:177] * [addons-936355] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:35:01.870432 1384589 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:35:01.870537 1384589 notify.go:220] Checking for updates...
	I0916 10:35:01.875880 1384589 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:35:01.878650 1384589 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:01.881242 1384589 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:35:01.883862 1384589 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:35:01.886520 1384589 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:35:01.889353 1384589 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:35:01.930300 1384589 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:35:01.930438 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:01.986400 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:01.976217774 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:01.986524 1384589 docker.go:318] overlay module found
	I0916 10:35:01.989262 1384589 out.go:177] * Using the docker driver based on user configuration
	I0916 10:35:01.991996 1384589 start.go:297] selected driver: docker
	I0916 10:35:01.992025 1384589 start.go:901] validating driver "docker" against <nil>
	I0916 10:35:01.992040 1384589 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:35:01.992727 1384589 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:35:02.058953 1384589 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:44 SystemTime:2024-09-16 10:35:02.049617339 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:35:02.059182 1384589 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:35:02.059420 1384589 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:35:02.062017 1384589 out.go:177] * Using Docker driver with root privileges
	I0916 10:35:02.064628 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:02.064789 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:02.064804 1384589 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:35:02.064885 1384589 start.go:340] cluster config:
	{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:02.069567 1384589 out.go:177] * Starting "addons-936355" primary control-plane node in "addons-936355" cluster
	I0916 10:35:02.072130 1384589 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:35:02.074827 1384589 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:35:02.077314 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:02.077371 1384589 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:35:02.077383 1384589 cache.go:56] Caching tarball of preloaded images
	I0916 10:35:02.077398 1384589 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:35:02.077476 1384589 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:35:02.077486 1384589 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:35:02.077848 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:02.077880 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json: {Name:mkd05c2b0dbaa1cc700db22c74ae8fbcc0c53329 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:02.092106 1384589 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:35:02.092232 1384589 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:35:02.092252 1384589 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:35:02.092257 1384589 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:35:02.092264 1384589 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:35:02.092269 1384589 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:35:19.265886 1384589 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:35:19.265926 1384589 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:35:19.265955 1384589 start.go:360] acquireMachinesLock for addons-936355: {Name:mk780e867f4084d469fbad7a4968b7ad3d556c69 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:35:19.266489 1384589 start.go:364] duration metric: took 511.962µs to acquireMachinesLock for "addons-936355"
	I0916 10:35:19.266531 1384589 start.go:93] Provisioning new machine with config: &{Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:19.266610 1384589 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:35:19.269716 1384589 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:35:19.269968 1384589 start.go:159] libmachine.API.Create for "addons-936355" (driver="docker")
	I0916 10:35:19.270003 1384589 client.go:168] LocalClient.Create starting
	I0916 10:35:19.270125 1384589 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:35:20.065665 1384589 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:35:20.505791 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:35:20.520423 1384589 cli_runner.go:211] docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:35:20.520525 1384589 network_create.go:284] running [docker network inspect addons-936355] to gather additional debugging logs...
	I0916 10:35:20.520546 1384589 cli_runner.go:164] Run: docker network inspect addons-936355
	W0916 10:35:20.534395 1384589 cli_runner.go:211] docker network inspect addons-936355 returned with exit code 1
	I0916 10:35:20.534432 1384589 network_create.go:287] error running [docker network inspect addons-936355]: docker network inspect addons-936355: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-936355 not found
	I0916 10:35:20.534447 1384589 network_create.go:289] output of [docker network inspect addons-936355]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-936355 not found
	
	** /stderr **
	I0916 10:35:20.534555 1384589 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:20.550802 1384589 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001826a70}
	I0916 10:35:20.550849 1384589 network_create.go:124] attempt to create docker network addons-936355 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:35:20.550909 1384589 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-936355 addons-936355
	I0916 10:35:20.622324 1384589 network_create.go:108] docker network addons-936355 192.168.49.0/24 created
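	A quick way to confirm what was just created; a sketch assuming the addons-936355 network from this run still exists (.IPAM.Config is standard docker network inspect output):

	    # Verify the subnet and gateway minikube picked for the profile network.
	    docker network inspect addons-936355 --format '{{range .IPAM.Config}}{{.Subnet}} via {{.Gateway}}{{end}}'
	    # Expected for this run: 192.168.49.0/24 via 192.168.49.1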
	I0916 10:35:20.622359 1384589 kic.go:121] calculated static IP "192.168.49.2" for the "addons-936355" container
	I0916 10:35:20.622443 1384589 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:35:20.636891 1384589 cli_runner.go:164] Run: docker volume create addons-936355 --label name.minikube.sigs.k8s.io=addons-936355 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:35:20.653249 1384589 oci.go:103] Successfully created a docker volume addons-936355
	I0916 10:35:20.653357 1384589 cli_runner.go:164] Run: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:35:22.737442 1384589 cli_runner.go:217] Completed: docker run --rm --name addons-936355-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --entrypoint /usr/bin/test -v addons-936355:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (2.08404207s)
	I0916 10:35:22.737471 1384589 oci.go:107] Successfully prepared a docker volume addons-936355
	I0916 10:35:22.737499 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:22.737519 1384589 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:35:22.737588 1384589 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:35:26.763089 1384589 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v addons-936355:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.025452617s)
	I0916 10:35:26.763126 1384589 kic.go:203] duration metric: took 4.025604753s to extract preloaded images to volume ...
	W0916 10:35:26.763258 1384589 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:35:26.763378 1384589 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:35:26.814712 1384589 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-936355 --name addons-936355 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-936355 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-936355 --network addons-936355 --ip 192.168.49.2 --volume addons-936355:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:35:27.165000 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Running}}
	I0916 10:35:27.189076 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:27.216370 1384589 cli_runner.go:164] Run: docker exec addons-936355 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:35:27.281467 1384589 oci.go:144] the created container "addons-936355" has a running status.
	I0916 10:35:27.281502 1384589 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa...
	I0916 10:35:28.804386 1384589 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:35:28.826599 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.843564 1384589 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:35:28.843591 1384589 kic_runner.go:114] Args: [docker exec --privileged addons-936355 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:35:28.892577 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:28.913158 1384589 machine.go:93] provisionDockerMachine start ...
	I0916 10:35:28.913258 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:28.931596 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:28.931893 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:28.931910 1384589 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:35:29.068030 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.068064 1384589 ubuntu.go:169] provisioning hostname "addons-936355"
	I0916 10:35:29.068142 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.085139 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.085383 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.085399 1384589 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-936355 && echo "addons-936355" | sudo tee /etc/hostname
	I0916 10:35:29.232508 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-936355
	
	I0916 10:35:29.232589 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.248944 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:29.249190 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:29.249214 1384589 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-936355' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-936355/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-936355' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:35:29.385206 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:35:29.385233 1384589 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:35:29.385263 1384589 ubuntu.go:177] setting up certificates
	I0916 10:35:29.385275 1384589 provision.go:84] configureAuth start
	I0916 10:35:29.385357 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:29.401844 1384589 provision.go:143] copyHostCerts
	I0916 10:35:29.401930 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:35:29.402060 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:35:29.402129 1384589 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:35:29.402184 1384589 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.addons-936355 san=[127.0.0.1 192.168.49.2 addons-936355 localhost minikube]
	I0916 10:35:29.844064 1384589 provision.go:177] copyRemoteCerts
	I0916 10:35:29.844139 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:35:29.844181 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:29.860341 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:29.957424 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:35:29.982494 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:35:30.020527 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:35:30.083993 1384589 provision.go:87] duration metric: took 698.682489ms to configureAuth
	I0916 10:35:30.084118 1384589 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:35:30.084480 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:30.084746 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.108015 1384589 main.go:141] libmachine: Using SSH client type: native
	I0916 10:35:30.108273 1384589 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34603 <nil> <nil>}
	I0916 10:35:30.108291 1384589 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:35:30.350713 1384589 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:35:30.350736 1384589 machine.go:96] duration metric: took 1.437556677s to provisionDockerMachine
	I0916 10:35:30.350754 1384589 client.go:171] duration metric: took 11.080732872s to LocalClient.Create
	I0916 10:35:30.350775 1384589 start.go:167] duration metric: took 11.080807939s to libmachine.API.Create "addons-936355"
	I0916 10:35:30.350784 1384589 start.go:293] postStartSetup for "addons-936355" (driver="docker")
	I0916 10:35:30.350795 1384589 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:35:30.350871 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:35:30.350928 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.367694 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.471627 1384589 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:35:30.475048 1384589 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:35:30.475083 1384589 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:35:30.475094 1384589 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:35:30.475101 1384589 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:35:30.475111 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:35:30.475191 1384589 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:35:30.475215 1384589 start.go:296] duration metric: took 124.425275ms for postStartSetup
	I0916 10:35:30.475537 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.492884 1384589 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/config.json ...
	I0916 10:35:30.493230 1384589 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:35:30.493280 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.510291 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.601939 1384589 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:35:30.606785 1384589 start.go:128] duration metric: took 11.340152497s to createHost
	I0916 10:35:30.606809 1384589 start.go:83] releasing machines lock for "addons-936355", held for 11.340303023s
	I0916 10:35:30.606879 1384589 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-936355
	I0916 10:35:30.623200 1384589 ssh_runner.go:195] Run: cat /version.json
	I0916 10:35:30.623223 1384589 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:35:30.623263 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.623284 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:30.644076 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.644213 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:30.736416 1384589 ssh_runner.go:195] Run: systemctl --version
	I0916 10:35:30.866086 1384589 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:35:31.012168 1384589 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:35:31.016985 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.040299 1384589 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:35:31.040383 1384589 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:35:31.079331 1384589 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:35:31.079357 1384589 start.go:495] detecting cgroup driver to use...
	I0916 10:35:31.079391 1384589 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:35:31.079448 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:35:31.097860 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:35:31.111311 1384589 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:35:31.111396 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:35:31.126864 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:35:31.142983 1384589 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:35:31.237602 1384589 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:35:31.329055 1384589 docker.go:233] disabling docker service ...
	I0916 10:35:31.329150 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:35:31.350134 1384589 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:35:31.362931 1384589 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:35:31.458212 1384589 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:35:31.563725 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:35:31.575461 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:35:31.592172 1384589 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:35:31.592265 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.602336 1384589 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:35:31.602418 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.612396 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.622391 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.632203 1384589 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:35:31.642063 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.651889 1384589 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:35:31.669408 1384589 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
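	Taken together, the sed chain above rewrites /etc/crio/crio.conf.d/02-crio.conf in place; a hypothetical check of the result (key names taken from the substitutions logged above, section headers not shown in the log):

	    # Confirm the drop-in now carries the expected CRI-O settings.
	    grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' /etc/crio/crio.conf.d/02-crio.conf
	    # Expected (sketch): pause_image = "registry.k8s.io/pause:3.10"
	    #                    cgroup_manager = "cgroupfs"
	    #                    conmon_cgroup = "pod"
	    #                    "net.ipv4.ip_unprivileged_port_start=0",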
	I0916 10:35:31.683307 1384589 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:35:31.692220 1384589 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:35:31.702005 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:31.781982 1384589 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:35:31.897438 1384589 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:35:31.897567 1384589 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:35:31.901379 1384589 start.go:563] Will wait 60s for crictl version
	I0916 10:35:31.901491 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:35:31.904735 1384589 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:35:31.941675 1384589 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
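	The version probe above reduces to a single command; to reproduce it from the host (assuming the node container from this run is running, and that crictl is on its default PATH as the log's "which crictl" suggests):

	    docker exec addons-936355 crictl version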
	I0916 10:35:31.941854 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:31.981298 1384589 ssh_runner.go:195] Run: crio --version
	I0916 10:35:32.027709 1384589 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:35:32.030371 1384589 cli_runner.go:164] Run: docker network inspect addons-936355 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:35:32.045684 1384589 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:35:32.049353 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.060434 1384589 kubeadm.go:883] updating cluster {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:35:32.060562 1384589 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:35:32.060622 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.132274 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.132300 1384589 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:35:32.132361 1384589 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:35:32.168136 1384589 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:35:32.168159 1384589 cache_images.go:84] Images are preloaded, skipping loading
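
Both preload checks above shell out to `sudo crictl images --output json`. A small Go sketch of parsing that output, assuming the CRI ListImages JSON field names (`images`, `repoTags`); the comparison against the expected preload list is omitted:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// criImages mirrors the shape of `crictl images --output json`; only the
// repoTags field is decoded here.
type criImages struct {
	Images []struct {
		RepoTags []string `json:"repoTags"`
	} `json:"images"`
}

func main() {
	out, err := exec.Command("sudo", "crictl", "images", "--output", "json").Output()
	if err != nil {
		panic(err)
	}
	var parsed criImages
	if err := json.Unmarshal(out, &parsed); err != nil {
		panic(err)
	}
	for _, img := range parsed.Images {
		for _, tag := range img.RepoTags {
			fmt.Println(tag)
		}
	}
}
```
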
	I0916 10:35:32.168167 1384589 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:35:32.168274 1384589 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=addons-936355 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:35:32.168366 1384589 ssh_runner.go:195] Run: crio config
	I0916 10:35:32.227191 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:32.227213 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:32.227223 1384589 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:35:32.227267 1384589 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-936355 NodeName:addons-936355 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kuberne
tes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:35:32.227445 1384589 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "addons-936355"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:35:32.227523 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:35:32.236628 1384589 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:35:32.236739 1384589 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:35:32.245582 1384589 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:35:32.264058 1384589 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:35:32.283541 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2151 bytes)
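
The kubeadm config shown earlier is rendered in-memory from minikube's options struct and then copied to /var/tmp/minikube/kubeadm.yaml.new, as the 2151-byte transfer above shows. A trimmed Go sketch of that render step with `text/template`; the struct fields and template here are illustrative stubs, not minikube's actual ones:

```go
package main

import (
	"os"
	"text/template"
)

// initCfg is a cut-down stand-in for the kubeadm options struct logged above.
type initCfg struct {
	AdvertiseAddress string
	BindPort         int
	NodeName         string
	CRISocket        string
}

const tmpl = `apiVersion: kubeadm.k8s.io/v1beta3
kind: InitConfiguration
localAPIEndpoint:
  advertiseAddress: {{.AdvertiseAddress}}
  bindPort: {{.BindPort}}
nodeRegistration:
  criSocket: {{.CRISocket}}
  name: "{{.NodeName}}"
`

func main() {
	t := template.Must(template.New("kubeadm").Parse(tmpl))
	// Values taken from the log; output would normally be scp'd to the node.
	_ = t.Execute(os.Stdout, initCfg{
		AdvertiseAddress: "192.168.49.2",
		BindPort:         8443,
		NodeName:         "addons-936355",
		CRISocket:        "unix:///var/run/crio/crio.sock",
	})
}
```
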
	I0916 10:35:32.302607 1384589 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:35:32.306351 1384589 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:35:32.317408 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:32.409376 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:32.423337 1384589 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355 for IP: 192.168.49.2
	I0916 10:35:32.423401 1384589 certs.go:194] generating shared ca certs ...
	I0916 10:35:32.423434 1384589 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:32.423586 1384589 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:35:34.185450 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt ...
	I0916 10:35:34.185484 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt: {Name:mk7933e16cdd72038659b0287d05eb0c475b810e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.185680 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key ...
	I0916 10:35:34.185693 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key: {Name:mkb7482a30b71122d1b4fb2bf43b1e757c702edc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.186220 1384589 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:35:34.459909 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt ...
	I0916 10:35:34.459947 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt: {Name:mke012c32e9f14a06899ff2aaaf49a35a27f11b6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460629 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key ...
	I0916 10:35:34.460645 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key: {Name:mk5d1994088ad6012c806fe8f78deff99aef1b4a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.460749 1384589 certs.go:256] generating profile certs ...
	I0916 10:35:34.460814 1384589 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key
	I0916 10:35:34.460832 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt with IP's: []
	I0916 10:35:34.818752 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt ...
	I0916 10:35:34.818789 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: {Name:mk0c01900c6bb90e11943bb255479c9c46b42cdc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.819458 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key ...
	I0916 10:35:34.819477 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.key: {Name:mk6a80bf44231e37c26b15b78c1573c745bc94c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:34.820007 1384589 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8
	I0916 10:35:34.820055 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:35:35.136595 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 ...
	I0916 10:35:35.136634 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8: {Name:mkefb9e5abb2f41ae336f1dfb5f1a2e66afaeb9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.136842 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 ...
	I0916 10:35:35.136857 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8: {Name:mkea4ee147dec7cfd16ab920313dbb27db2e74f5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.137417 1384589 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt
	I0916 10:35:35.137519 1384589 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key.87ecb0c8 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key
	I0916 10:35:35.137576 1384589 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key
	I0916 10:35:35.137599 1384589 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt with IP's: []
	I0916 10:35:35.880558 1384589 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt ...
	I0916 10:35:35.880594 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt: {Name:mke368773a6b2b93aed6ad850fe8fd0d4a737afa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881334 1384589 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key ...
	I0916 10:35:35.881354 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key: {Name:mk0b7d6a78a045adf50310a69acebceca87fff88 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:35.881575 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:35:35.881620 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:35:35.881652 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:35:35.881681 1384589 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
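
The certs.go/crypto.go steps above first mint the shared CAs (minikubeCA, proxyClientCA) and then sign the profile certs with them. A self-contained Go sketch of the CA half using `crypto/x509`; the key size, validity window, and output names are assumptions, not minikube's exact values:

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"os"
	"time"
)

func main() {
	// Generate the CA key pair (2048-bit RSA is an assumption here).
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	// Self-signed CA template named like the log's "minikubeCA".
	tpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().Add(3 * 365 * 24 * time.Hour),
		KeyUsage:              x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature,
		BasicConstraintsValid: true,
		IsCA:                  true,
	}
	der, err := x509.CreateCertificate(rand.Reader, tpl, tpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	// PEM-encode cert and key, the same ca.crt / ca.key pair written above.
	_ = os.WriteFile("ca.crt", pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: der}), 0644)
	_ = os.WriteFile("ca.key", pem.EncodeToMemory(&pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(key)}), 0600)
}
```
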
	I0916 10:35:35.882348 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:35:35.913124 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:35:35.940837 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:35:35.966731 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:35:35.992292 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:35:36.018704 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:35:36.045022 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:35:36.070444 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:35:36.097278 1384589 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:35:36.122467 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:35:36.141948 1384589 ssh_runner.go:195] Run: openssl version
	I0916 10:35:36.147681 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:35:36.157655 1384589 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161783 1384589 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.161849 1384589 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:35:36.169303 1384589 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:35:36.183583 1384589 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:35:36.188459 1384589 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:35:36.188535 1384589 kubeadm.go:392] StartCluster: {Name:addons-936355 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-936355 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:35:36.188663 1384589 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:35:36.188762 1384589 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:35:36.238853 1384589 cri.go:89] found id: ""
	I0916 10:35:36.238944 1384589 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:35:36.247955 1384589 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:35:36.256986 1384589 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:35:36.257089 1384589 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:35:36.266246 1384589 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:35:36.266266 1384589 kubeadm.go:157] found existing configuration files:
	
	I0916 10:35:36.266339 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:35:36.274963 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:35:36.275044 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:35:36.283444 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:35:36.292355 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:35:36.292450 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:35:36.300873 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.309855 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:35:36.309929 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:35:36.318718 1384589 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:35:36.328008 1384589 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:35:36.328097 1384589 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
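
All four checks above follow one pattern: grep the kubeconfig for the expected control-plane URL and, when the file lacks it or does not exist, remove it so `kubeadm init` can write a fresh copy. A compact Go rendering of that loop, illustrative rather than minikube's actual helper:

```go
package main

import (
	"os"
	"strings"
)

// checkOrRemove keeps a kubeconfig only if it already names the expected
// control-plane endpoint; otherwise it removes the file so kubeadm can
// rewrite it, matching the grep/rm pairs in the log above.
func checkOrRemove(path, endpoint string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		if os.IsNotExist(err) {
			return nil // the "No such file or directory" case in the log
		}
		return err
	}
	if strings.Contains(string(data), endpoint) {
		return nil
	}
	return os.Remove(path)
}

func main() {
	for _, f := range []string{"admin.conf", "kubelet.conf", "controller-manager.conf", "scheduler.conf"} {
		_ = checkOrRemove("/etc/kubernetes/"+f, "https://control-plane.minikube.internal:8443")
	}
}
```
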
	I0916 10:35:36.336437 1384589 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:35:36.378930 1384589 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:35:36.379124 1384589 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:35:36.400406 1384589 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:35:36.400480 1384589 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:35:36.400522 1384589 kubeadm.go:310] OS: Linux
	I0916 10:35:36.400571 1384589 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:35:36.400622 1384589 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:35:36.400687 1384589 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:35:36.400738 1384589 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:35:36.400790 1384589 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:35:36.400843 1384589 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:35:36.400891 1384589 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:35:36.400941 1384589 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:35:36.400990 1384589 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:35:36.460868 1384589 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:35:36.460983 1384589 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:35:36.461077 1384589 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:35:36.469524 1384589 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:35:36.478231 1384589 out.go:235]   - Generating certificates and keys ...
	I0916 10:35:36.478421 1384589 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:35:36.478536 1384589 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:35:37.031514 1384589 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:35:37.927948 1384589 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:35:38.481156 1384589 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:35:38.950500 1384589 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:35:40.037164 1384589 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:35:40.037694 1384589 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.393078 1384589 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:35:40.393223 1384589 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-936355 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:35:40.639316 1384589 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:35:41.086019 1384589 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:35:41.417060 1384589 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:35:41.417146 1384589 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:35:41.829000 1384589 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:35:42.186509 1384589 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:35:43.056769 1384589 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:35:43.944133 1384589 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:35:44.069436 1384589 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:35:44.070260 1384589 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:35:44.073516 1384589 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:35:44.076353 1384589 out.go:235]   - Booting up control plane ...
	I0916 10:35:44.076466 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:35:44.076546 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:35:44.077309 1384589 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:35:44.088522 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:35:44.095329 1384589 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:35:44.095390 1384589 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:35:44.198308 1384589 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:35:44.198428 1384589 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:35:45.200220 1384589 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00196488s
	I0916 10:35:45.200324 1384589 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:35:51.202352 1384589 kubeadm.go:310] [api-check] The API server is healthy after 6.002166951s
	I0916 10:35:51.223941 1384589 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:35:51.239556 1384589 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:35:51.267029 1384589 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:35:51.267231 1384589 kubeadm.go:310] [mark-control-plane] Marking the node addons-936355 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:35:51.278589 1384589 kubeadm.go:310] [bootstrap-token] Using token: 08qv26.fux33djnogp684b3
	I0916 10:35:51.281486 1384589 out.go:235]   - Configuring RBAC rules ...
	I0916 10:35:51.281633 1384589 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:35:51.288736 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:35:51.298974 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:35:51.303116 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:35:51.306944 1384589 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:35:51.312530 1384589 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:35:51.609739 1384589 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:35:52.042589 1384589 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:35:52.609454 1384589 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:35:52.610559 1384589 kubeadm.go:310] 
	I0916 10:35:52.610639 1384589 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:35:52.610651 1384589 kubeadm.go:310] 
	I0916 10:35:52.610728 1384589 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:35:52.610737 1384589 kubeadm.go:310] 
	I0916 10:35:52.610762 1384589 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:35:52.610825 1384589 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:35:52.610877 1384589 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:35:52.610886 1384589 kubeadm.go:310] 
	I0916 10:35:52.610939 1384589 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:35:52.610947 1384589 kubeadm.go:310] 
	I0916 10:35:52.610994 1384589 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:35:52.611003 1384589 kubeadm.go:310] 
	I0916 10:35:52.611054 1384589 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:35:52.611131 1384589 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:35:52.611205 1384589 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:35:52.611213 1384589 kubeadm.go:310] 
	I0916 10:35:52.611296 1384589 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:35:52.611376 1384589 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:35:52.611384 1384589 kubeadm.go:310] 
	I0916 10:35:52.611467 1384589 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.611571 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:35:52.611602 1384589 kubeadm.go:310] 	--control-plane 
	I0916 10:35:52.611610 1384589 kubeadm.go:310] 
	I0916 10:35:52.611694 1384589 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:35:52.611701 1384589 kubeadm.go:310] 
	I0916 10:35:52.611782 1384589 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 08qv26.fux33djnogp684b3 \
	I0916 10:35:52.612037 1384589 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 10:35:52.615159 1384589 kubeadm.go:310] W0916 10:35:36.375856    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615456 1384589 kubeadm.go:310] W0916 10:35:36.376640    1193 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:35:52.615672 1384589 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:35:52.615783 1384589 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:35:52.615802 1384589 cni.go:84] Creating CNI manager for ""
	I0916 10:35:52.615810 1384589 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:35:52.618721 1384589 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:35:52.621394 1384589 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:35:52.625462 1384589 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:35:52.625484 1384589 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:35:52.644461 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
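
Applying the CNI manifest is just an exec of the pinned kubectl, as the Run line above shows. A Go sketch of that call; the binary, kubeconfig, and manifest paths are taken from the log, the wrapper itself is illustrative:

```go
package main

import (
	"fmt"
	"os/exec"
)

// applyManifest shells out to the version-pinned kubectl the same way the
// log applies /var/tmp/minikube/cni.yaml.
func applyManifest(kubectl, kubeconfig, manifest string) error {
	cmd := exec.Command("sudo", kubectl, "apply",
		"--kubeconfig="+kubeconfig, "-f", manifest)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("kubectl apply: %v\n%s", err, out)
	}
	return nil
}

func main() {
	_ = applyManifest(
		"/var/lib/minikube/binaries/v1.31.1/kubectl",
		"/var/lib/minikube/kubeconfig",
		"/var/tmp/minikube/cni.yaml",
	)
}
```
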
	I0916 10:35:52.919005 1384589 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:35:52.919065 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:52.919130 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-936355 minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-936355 minikube.k8s.io/primary=true
	I0916 10:35:52.934021 1384589 ops.go:34] apiserver oom_adj: -16
	I0916 10:35:53.058693 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:53.559565 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.058855 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:54.558709 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.059014 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:55.559273 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.058909 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.559492 1384589 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:35:56.645989 1384589 kubeadm.go:1113] duration metric: took 3.7269816s to wait for elevateKubeSystemPrivileges
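
The eight half-second `kubectl get sa default` runs above are a poll loop: the step completes once the default service account exists, which is what the 3.7s duration metric measures. A Go sketch of such a loop, with an interval and timeout that are illustrative rather than minikube's exact values:

```go
package main

import (
	"fmt"
	"os/exec"
	"time"
)

// waitForDefaultSA polls `kubectl get sa default` until it succeeds, the
// same pattern as the repeated runs in the log above.
func waitForDefaultSA(kubectl, kubeconfig string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		err := exec.Command("sudo", kubectl, "get", "sa", "default",
			"--kubeconfig="+kubeconfig).Run()
		if err == nil {
			return nil
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("default service account not ready after %s", timeout)
}

func main() {
	_ = waitForDefaultSA("/var/lib/minikube/binaries/v1.31.1/kubectl",
		"/var/lib/minikube/kubeconfig", time.Minute)
}
```
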
	I0916 10:35:56.646081 1384589 kubeadm.go:394] duration metric: took 20.457571781s to StartCluster
	I0916 10:35:56.646115 1384589 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.646272 1384589 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:35:56.646729 1384589 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:35:56.647006 1384589 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:35:56.647218 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.647256 1384589 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:35:56.647344 1384589 addons.go:69] Setting yakd=true in profile "addons-936355"
	I0916 10:35:56.647362 1384589 addons.go:234] Setting addon yakd=true in "addons-936355"
	I0916 10:35:56.647386 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.647853 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.647019 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:35:56.648343 1384589 addons.go:69] Setting inspektor-gadget=true in profile "addons-936355"
	I0916 10:35:56.648358 1384589 addons.go:69] Setting metrics-server=true in profile "addons-936355"
	I0916 10:35:56.648364 1384589 addons.go:69] Setting cloud-spanner=true in profile "addons-936355"
	I0916 10:35:56.648372 1384589 addons.go:234] Setting addon cloud-spanner=true in "addons-936355"
	I0916 10:35:56.648375 1384589 addons.go:234] Setting addon metrics-server=true in "addons-936355"
	I0916 10:35:56.648397 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648398 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.648856 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648883 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.651521 1384589 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-936355"
	I0916 10:35:56.651556 1384589 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-936355"
	I0916 10:35:56.651597 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.652064 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.654169 1384589 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-936355"
	I0916 10:35:56.654360 1384589 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:35:56.654505 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.656244 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.657047 1384589 addons.go:69] Setting registry=true in profile "addons-936355"
	I0916 10:35:56.657068 1384589 addons.go:234] Setting addon registry=true in "addons-936355"
	I0916 10:35:56.657100 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.657530 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.665356 1384589 addons.go:69] Setting storage-provisioner=true in profile "addons-936355"
	I0916 10:35:56.665392 1384589 addons.go:234] Setting addon storage-provisioner=true in "addons-936355"
	I0916 10:35:56.665428 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.665900 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656864 1384589 addons.go:69] Setting default-storageclass=true in profile "addons-936355"
	I0916 10:35:56.672310 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-936355"
	I0916 10:35:56.672744 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656877 1384589 addons.go:69] Setting gcp-auth=true in profile "addons-936355"
	I0916 10:35:56.677792 1384589 mustload.go:65] Loading cluster: addons-936355
	I0916 10:35:56.678032 1384589 config.go:182] Loaded profile config "addons-936355": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:35:56.678386 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.685741 1384589 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-936355"
	I0916 10:35:56.685780 1384589 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-936355"
	I0916 10:35:56.686170 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656881 1384589 addons.go:69] Setting ingress=true in profile "addons-936355"
	I0916 10:35:56.697863 1384589 addons.go:234] Setting addon ingress=true in "addons-936355"
	I0916 10:35:56.697916 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.698402 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.656886 1384589 addons.go:69] Setting ingress-dns=true in profile "addons-936355"
	I0916 10:35:56.714403 1384589 addons.go:234] Setting addon ingress-dns=true in "addons-936355"
	I0916 10:35:56.714458 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.715038 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.718711 1384589 out.go:177] * Verifying Kubernetes components...
	I0916 10:35:56.721654 1384589 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:35:56.725191 1384589 addons.go:69] Setting volcano=true in profile "addons-936355"
	I0916 10:35:56.725221 1384589 addons.go:234] Setting addon volcano=true in "addons-936355"
	I0916 10:35:56.725264 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.725742 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.755780 1384589 addons.go:69] Setting volumesnapshots=true in profile "addons-936355"
	I0916 10:35:56.755830 1384589 addons.go:234] Setting addon volumesnapshots=true in "addons-936355"
	I0916 10:35:56.755891 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.756438 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.648360 1384589 addons.go:234] Setting addon inspektor-gadget=true in "addons-936355"
	I0916 10:35:56.781338 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.781866 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
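
Each addon toggle above re-checks the profile container with `docker container inspect --format={{.State.Status}}` before proceeding. A one-function Go sketch of that probe; the container name comes from the log:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// containerStatus mirrors the repeated "docker container inspect
// addons-936355 --format={{.State.Status}}" probes in the log above.
func containerStatus(name string) (string, error) {
	out, err := exec.Command("docker", "container", "inspect", name,
		"--format", "{{.State.Status}}").Output()
	return strings.TrimSpace(string(out)), err
}

func main() {
	status, err := containerStatus("addons-936355")
	fmt.Println(status, err) // e.g. "running <nil>"
}
```
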
	I0916 10:35:56.795868 1384589 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:35:56.806961 1384589 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:35:56.813860 1384589 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:56.813885 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:35:56.813953 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.825055 1384589 addons.go:234] Setting addon default-storageclass=true in "addons-936355"
	I0916 10:35:56.825094 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.825522 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.844917 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:35:56.847733 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:56.847756 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:35:56.847823 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.855550 1384589 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:56.855573 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:35:56.855637 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.868185 1384589 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:35:56.870805 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:35:56.870832 1384589 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:35:56.870903 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.880834 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.883983 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:35:56.888274 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:35:56.892893 1384589 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:35:56.893194 1384589 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:56.893206 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:35:56.893271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.895536 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:35:56.895559 1384589 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:35:56.895631 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:56.932992 1384589 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-936355"
	I0916 10:35:56.933037 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:56.933461 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:35:56.975517 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:35:56.981731 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:35:57.008862 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:35:57.011867 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.012071 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:35:57.012271 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.012572 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:35:57.018623 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	W0916 10:35:57.018876 1384589 out.go:270] ! Enabling 'volcano' returned an error: running callbacks: [volcano addon does not support crio]
	I0916 10:35:57.019207 1384589 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:35:57.026491 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:35:57.031787 1384589 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.031824 1384589 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:35:57.031905 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.035870 1384589 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:35:57.037432 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:35:57.040920 1384589 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:35:57.041029 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.041817 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:35:57.047002 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:35:57.047021 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:35:57.047081 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.039199 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:35:57.067112 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:35:57.067136 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:35:57.067221 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.077336 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.080728 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:35:57.083509 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:35:57.084922 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.092584 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:35:57.100918 1384589 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:35:57.102580 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.103637 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:35:57.103656 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:35:57.103715 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.120809 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.121658 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.165011 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.190914 1384589 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:35:57.195762 1384589 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:35:57.198447 1384589 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.198482 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:35:57.198559 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:35:57.237951 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.247913 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.261430 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.263688 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.268844 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.272259 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	W0916 10:35:57.289422 1384589 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:35:57.289665 1384589 retry.go:31] will retry after 343.76577ms: ssh: handshake failed: EOF
	I0916 10:35:57.317769 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:35:57.327435 1384589 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:35:57.327622 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
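
The /bin/bash pipeline above is how host.minikube.internal becomes resolvable from inside the cluster: it dumps the coredns ConfigMap, splices a hosts block (plus a log directive) into the Corefile with sed, and pushes the result back through kubectl replace. Reconstructed from the sed expressions, the block inserted immediately before the existing `forward . /etc/resolv.conf` line is:

    hosts {
       192.168.49.1 host.minikube.internal
       fallthrough
    }

The "host record injected" line at 10:36:00 below confirms the replace succeeded.
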
	I0916 10:35:57.507011 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:35:57.508273 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:35:57.512529 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:35:57.512557 1384589 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:35:57.532805 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:35:57.544603 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:35:57.544626 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:35:57.554769 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:35:57.597359 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:35:57.683748 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:35:57.683782 1384589 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:35:57.706763 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:35:57.708832 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:35:57.708864 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:35:57.733074 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:35:57.733107 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:35:57.767880 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:35:57.767908 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:35:57.780746 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:35:57.780786 1384589 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:35:57.807404 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:35:57.850707 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:35:57.850745 1384589 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:35:57.887607 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:35:57.887636 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:35:57.954841 1384589 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:57.954878 1384589 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:35:57.957894 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:35:57.957918 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:35:57.990850 1384589 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:57.990882 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:35:58.040155 1384589 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.040193 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:35:58.078005 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:35:58.078038 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:35:58.084259 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:35:58.084302 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:35:58.131227 1384589 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:35:58.131253 1384589 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:35:58.132161 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:35:58.147419 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:35:58.178615 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:35:58.199520 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:35:58.199553 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:35:58.206840 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:35:58.206873 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:35:58.251350 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:35:58.251378 1384589 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:35:58.301781 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:35:58.301809 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:35:58.328155 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:35:58.328184 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:35:58.351423 1384589 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.351449 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:35:58.404154 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:35:58.404188 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:35:58.467023 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:35:58.468235 1384589 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:35:58.468257 1384589 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:35:58.517809 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:35:58.517836 1384589 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:35:58.529132 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:35:58.529162 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:35:58.607318 1384589 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.607345 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:35:58.620217 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:35:58.620264 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:35:58.671546 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:35:58.726776 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:35:58.726803 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:35:58.855138 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:35:58.855204 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:35:58.993338 1384589 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:35:58.993375 1384589 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:35:59.149795 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:36:00.098293 1384589 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (2.77063538s)
	I0916 10:36:00.098468 1384589 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:36:00.098398 1384589 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (2.770933472s)
	I0916 10:36:00.099620 1384589 node_ready.go:35] waiting up to 6m0s for node "addons-936355" to be "Ready" ...
	I0916 10:36:00.683691 1384589 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-936355" context rescaled to 1 replicas
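
From here the provisioner settles into polling: node_ready.go waits up to 6m0s for the node's Ready condition (the repeated "Ready":"False" lines below), and kapi.go has just rescaled coredns to a single replica to match the one-node cluster. Outside the test harness, the same two steps can be reproduced with plain kubectl (illustrative equivalents, not the calls minikube makes internally):

    kubectl --context addons-936355 wait --for=condition=Ready node/addons-936355 --timeout=6m
    kubectl --context addons-936355 -n kube-system scale deployment coredns --replicas=1
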
	I0916 10:36:02.134513 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:03.099256 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (5.592195221s)
	I0916 10:36:03.099297 1384589 addons.go:475] Verifying addon ingress=true in "addons-936355"
	I0916 10:36:03.099513 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (5.591216064s)
	I0916 10:36:03.099584 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (5.56674984s)
	I0916 10:36:03.099618 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (5.544827293s)
	I0916 10:36:03.099645 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (5.502263542s)
	I0916 10:36:03.099882 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.393096916s)
	I0916 10:36:03.099983 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (5.292558502s)
	I0916 10:36:03.100117 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (4.967882873s)
	I0916 10:36:03.100138 1384589 addons.go:475] Verifying addon registry=true in "addons-936355"
	I0916 10:36:03.100642 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (4.953181668s)
	I0916 10:36:03.100670 1384589 addons.go:475] Verifying addon metrics-server=true in "addons-936355"
	I0916 10:36:03.100733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (4.922089802s)
	I0916 10:36:03.102943 1384589 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-936355 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:36:03.102961 1384589 out.go:177] * Verifying registry addon...
	I0916 10:36:03.103034 1384589 out.go:177] * Verifying ingress addon...
	I0916 10:36:03.105813 1384589 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:36:03.106800 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:36:03.137676 1384589 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:36:03.137755 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.140614 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:03.140698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	W0916 10:36:03.153289 1384589 out.go:270] ! Enabling 'storage-provisioner-rancher' returned an error: running callbacks: [Error making local-path the default storage class: Error while marking storage class local-path as default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "local-path": the object has been modified; please apply your changes to the latest version and try again]
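
This warning is an optimistic-concurrency conflict rather than a failed deployment: two writers raced to update the local-path StorageClass, and the update carrying the stale resourceVersion was rejected, so only the default-class marking was lost. If local-path still needed to be made the default afterwards, the standard fix is the documented annotation patch (shown for illustration; it was not run in this test):

    kubectl patch storageclass local-path -p \
      '{"metadata": {"annotations": {"storageclass.kubernetes.io/is-default-class": "true"}}}'
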
	I0916 10:36:03.250335 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (4.783269551s)
	W0916 10:36:03.250417 1384589 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250450 1384589 retry.go:31] will retry after 275.497637ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:36:03.250543 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (4.57892356s)
	I0916 10:36:03.461537 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (4.311696877s)
	I0916 10:36:03.461620 1384589 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-936355"
	I0916 10:36:03.466201 1384589 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:36:03.469722 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:36:03.486422 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:03.486490 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:03.526121 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:36:03.615580 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:03.616763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:03.973974 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.110336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.111341 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.482735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:04.603445 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:04.611582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:04.612963 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:04.974584 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.112352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.113152 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.475349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:05.612975 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:05.617564 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:05.994295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.112783 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.113610 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.402733 1384589 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.87656549s)
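
The failed apply at 10:36:03 and this successful retry bracket a CRD-establishment race: the VolumeSnapshotClass CRD and a VolumeSnapshotClass object were submitted in one kubectl apply, and the API server rejected the custom resource because the new kind was not yet registered ("ensure CRDs are installed first"). By the time of the retry, the CRDs created on the first pass had become established, so the second apply went through. A more surgical alternative to the blanket retry is to wait for establishment between two applies (a sketch using the CRD name from the log; the 60s timeout is an arbitrary choice):

    kubectl apply -f snapshot.storage.k8s.io_volumesnapshotclasses.yaml
    kubectl wait --for=condition=Established --timeout=60s \
      crd/volumesnapshotclasses.snapshot.storage.k8s.io
    kubectl apply -f csi-hostpath-snapshotclass.yaml
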
	I0916 10:36:06.474104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:06.604073 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:06.611947 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:06.613297 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:06.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.111053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.112244 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.247182 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:36:07.247343 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.269993 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.399328 1384589 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:36:07.424561 1384589 addons.go:234] Setting addon gcp-auth=true in "addons-936355"
	I0916 10:36:07.424615 1384589 host.go:66] Checking if "addons-936355" exists ...
	I0916 10:36:07.425137 1384589 cli_runner.go:164] Run: docker container inspect addons-936355 --format={{.State.Status}}
	I0916 10:36:07.445430 1384589 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:36:07.445507 1384589 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-936355
	I0916 10:36:07.462936 1384589 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34603 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/addons-936355/id_rsa Username:docker}
	I0916 10:36:07.473788 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:07.564092 1384589 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:36:07.566842 1384589 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:36:07.569433 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:36:07.569479 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:36:07.591162 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:36:07.591235 1384589 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:36:07.611011 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:07.612352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:07.614169 1384589 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.614230 1384589 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:36:07.634944 1384589 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:36:07.973644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.114938 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.115927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.289637 1384589 addons.go:475] Verifying addon gcp-auth=true in "addons-936355"
	I0916 10:36:08.292442 1384589 out.go:177] * Verifying gcp-auth addon...
	I0916 10:36:08.297073 1384589 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:36:08.311457 1384589 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:36:08.311536 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
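
Every "Verifying ... addon" phase reduces to the same kapi.go loop: list pods by the addon's label selector and poll until they leave Pending, which is why the log now interleaves four selectors (ingress-nginx, registry, csi-hostpath-driver, gcp-auth) and keeps printing Pending until the node itself reports Ready. A one-off equivalent of the gcp-auth check from outside the harness would be (illustrative):

    kubectl --context addons-936355 -n gcp-auth wait pod \
      -l kubernetes.io/minikube-addons=gcp-auth --for=condition=Ready --timeout=5m
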
	I0916 10:36:08.473794 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:08.610857 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:08.611876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:08.801268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:08.973643 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.105583 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:09.110567 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.111022 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.300943 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.478291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:09.611071 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:09.612876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:09.801153 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:09.973766 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.118258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.119777 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.307205 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.473996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:10.611600 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:10.611698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:10.801229 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:10.974340 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.112014 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:11.116183 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.120476 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.301066 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.473420 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:11.610713 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:11.612423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:11.800270 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:11.973407 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.115791 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.116920 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.301411 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.473867 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:12.609770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:12.610662 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:12.801634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:12.973046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.110851 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.111134 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.300575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.473835 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:13.603219 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:13.610390 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:13.611574 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:13.801371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:13.973479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.112208 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.113533 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.300299 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.474139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:14.610046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:14.612561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:14.800653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:14.972848 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.110408 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.110932 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.300237 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.473707 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:15.603293 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:15.610246 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:15.611371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:15.800451 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:15.973710 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.110350 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.111259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.300830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.472823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:16.609912 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:16.610711 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:16.801005 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:16.973568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.110550 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.112172 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.301017 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.473847 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:17.603589 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:17.610593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:17.611441 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:17.800956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:17.974143 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.110263 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.111182 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.301212 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:18.610442 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:18.611436 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:18.800286 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:18.973687 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.110597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.111342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.301090 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.473269 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:19.609625 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:19.610850 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:19.800307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:19.974046 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.103731 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:20.112214 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.113558 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.301265 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.473689 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:20.610324 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:20.611114 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:20.800597 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:20.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.109533 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.111696 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.302328 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.473189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:21.610124 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:21.611262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:21.801275 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:21.973296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.111525 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.113002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.300321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.473211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:22.602936 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:22.610283 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:22.611107 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:22.800931 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:22.974004 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.109980 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.110973 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.301081 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.473035 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:23.610199 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:23.611296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:23.800268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:23.973666 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.109603 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.110778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.301295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.473680 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:24.609537 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:24.610685 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:24.800457 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:24.974147 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.103048 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:25.111012 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.111240 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.300767 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.473813 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:25.610908 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:25.611483 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:25.801271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:25.973399 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.109553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.111922 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.300892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.473331 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:26.609476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:26.610465 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:26.800314 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:26.974947 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.104747 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:27.110466 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.113262 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.302886 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.475127 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:27.610103 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:27.619742 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:27.801198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:27.974956 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.115379 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.117659 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.300851 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.474546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:28.610341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:28.611106 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:28.800632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:28.973876 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.109998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.111054 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.300629 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.473403 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:29.603802 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:29.610293 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:29.611053 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:29.800316 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:29.975589 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.112209 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.112442 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.300936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.473757 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:30.610468 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:30.610927 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:30.801173 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:30.974752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.111549 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.111768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.300752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.472954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:31.610456 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:31.611765 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:31.801083 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:31.973842 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.103800 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:32.109737 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.111636 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.301104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.473774 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:32.610924 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:32.611190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:32.801482 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:32.974672 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.110188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.111271 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.301349 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.473433 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:33.610409 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:33.610888 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:33.801627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:33.973881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.110134 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.110497 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.474295 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:34.603135 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:34.610342 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:34.611690 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:34.801258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:34.973555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.110766 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.111394 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.300970 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.473087 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:35.610115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:35.611008 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:35.800154 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:35.974082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.109881 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.110992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.300326 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.473408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:36.604025 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:36.610440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:36.610869 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:36.801065 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:36.973323 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.109996 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.111285 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.300895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.474211 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:37.610044 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:37.610356 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:37.800660 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:37.973698 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.110670 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.110901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.301861 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.473168 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:38.610218 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:38.611834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:38.800936 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:38.975190 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.103702 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:39.110476 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.111170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.301227 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.473926 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:39.609710 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:39.611195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:39.800502 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:39.973582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.111455 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.111653 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.300951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.473797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:40.610268 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:40.611132 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:40.800770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:40.974250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.110735 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.111970 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.300538 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.473964 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:41.603723 1384589 node_ready.go:53] node "addons-936355" has status "Ready":"False"
	I0916 10:36:41.610292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:41.610627 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:41.801470 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:41.974052 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.110959 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.112236 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.300960 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.473748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:42.612461 1384589 node_ready.go:49] node "addons-936355" has status "Ready":"True"
	I0916 10:36:42.612538 1384589 node_ready.go:38] duration metric: took 42.512890552s for node "addons-936355" to be "Ready" ...
	I0916 10:36:42.612563 1384589 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:36:42.623341 1384589 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:36:42.623417 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:42.624231 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:42.627174 1384589 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:42.859763 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:42.978703 1384589 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:36:42.978731 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:43.131865 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.133687 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.349019 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.479093 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:43.612085 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:43.613250 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:43.838378 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:43.975549 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.112002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.113078 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.303567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.474708 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:44.612644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:44.614103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:44.633869 1384589 pod_ready.go:93] pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.633943 1384589 pod_ready.go:82] duration metric: took 2.006728044s for pod "coredns-7c65d6cfc9-r6x6b" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.633994 1384589 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642581 1384589 pod_ready.go:93] pod "etcd-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.642653 1384589 pod_ready.go:82] duration metric: took 8.633064ms for pod "etcd-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.642683 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650836 1384589 pod_ready.go:93] pod "kube-apiserver-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.650858 1384589 pod_ready.go:82] duration metric: took 8.155202ms for pod "kube-apiserver-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.650871 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656888 1384589 pod_ready.go:93] pod "kube-controller-manager-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.656911 1384589 pod_ready.go:82] duration metric: took 6.032453ms for pod "kube-controller-manager-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.656925 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663172 1384589 pod_ready.go:93] pod "kube-proxy-6zqlq" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:44.663198 1384589 pod_ready.go:82] duration metric: took 6.264685ms for pod "kube-proxy-6zqlq" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.663210 1384589 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:44.800889 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:44.975665 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.036535 1384589 pod_ready.go:93] pod "kube-scheduler-addons-936355" in "kube-system" namespace has status "Ready":"True"
	I0916 10:36:45.036565 1384589 pod_ready.go:82] duration metric: took 373.347727ms for pod "kube-scheduler-addons-936355" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:45.036579 1384589 pod_ready.go:79] waiting up to 6m0s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
	I0916 10:36:45.111493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.112631 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.308107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.474657 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:45.611914 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:45.612461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:45.801892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:45.974950 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.111683 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.114082 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.301157 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.475128 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:46.611945 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:46.613048 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:46.801341 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:46.974921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.044703 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:47.112165 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.114489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.301333 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.480727 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:47.612823 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:47.613992 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:47.802256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:47.975336 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.114295 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.116308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.301669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.478171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:48.613077 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:48.615032 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:48.802520 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:48.974753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.045627 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:49.112778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.116258 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.301317 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.477632 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:49.617030 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:49.618841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:49.801756 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:49.975098 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.112372 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.115428 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.303239 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.475866 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:50.610712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:50.613666 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:50.800849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:50.975104 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.113376 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.116309 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.305523 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.476644 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:51.547164 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:51.619471 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:51.620588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:51.803271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:51.978508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.112860 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.114242 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.475635 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:52.610961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:52.611563 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:52.802388 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:52.975192 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.112514 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.113242 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.301036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.475517 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:53.613316 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:53.614402 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:53.801348 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:53.977291 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.050970 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:54.110981 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.112076 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.300546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.476454 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:54.610582 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:54.612518 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:54.803551 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:54.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.111398 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.112761 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.301089 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.474274 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:55.609938 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:55.612002 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:55.800575 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:55.974519 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.112644 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.113614 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.301290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.476637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:56.543349 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:56.613159 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:56.614779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:56.801547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:56.975878 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.111646 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.114449 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.301068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.475345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:57.612454 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:57.613637 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:57.802031 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:57.975475 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.112792 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.114331 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.301185 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.477806 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:58.543702 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:36:58.611292 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:58.612924 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:58.801770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:58.978258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.111614 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.113277 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.300874 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.478857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:36:59.612769 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:36:59.614234 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:36:59.801191 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:36:59.975770 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.124776 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.127598 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.312397 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.476593 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:00.612256 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:00.615086 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:00.801400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:00.975455 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.045782 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:01.116772 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.117862 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.300859 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.475607 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:01.614426 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:01.616901 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:01.806694 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:01.976923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.111895 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.112248 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.301293 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.474913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:02.610544 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:02.611469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:02.801570 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:02.974546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.110553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.111258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.302951 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.475760 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:03.542976 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:03.612478 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:03.614314 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:03.802588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:03.974619 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.116170 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.117565 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.301282 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.474423 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:04.609959 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:04.611546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:04.802714 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:04.974564 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.111189 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.119380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.301308 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.480667 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:05.545296 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:05.613921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:05.620210 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:05.801887 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:05.979380 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.117389 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.120937 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.301555 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.475271 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:06.612080 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:06.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:06.801421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:06.975493 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.111399 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.114107 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.300779 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.478877 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:07.558060 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:07.615155 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:07.616925 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:07.801853 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:07.975171 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.110594 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.111215 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.300440 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.476290 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:08.611297 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:08.612374 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:08.801416 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:08.975287 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.110125 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.111958 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.304146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.474050 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:09.610553 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:09.611805 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:09.801358 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:09.974606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.045151 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:10.115132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.117029 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.300604 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.478567 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:10.612321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:10.613469 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:10.801386 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:10.979174 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.112568 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.116046 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.301477 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.475805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:11.613534 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:11.615206 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:11.802410 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:11.976748 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.047271 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:12.112753 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.114779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.300849 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.479609 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:12.633512 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:12.635102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:12.801945 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:12.978658 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.111553 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.113586 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.303385 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.479039 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:13.614588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:13.615554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:13.806654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:13.981409 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.060889 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:14.112654 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.113844 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.301688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.474872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:14.610310 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:14.610746 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:14.800633 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:14.975036 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.112998 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.115460 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.300634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.474102 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:15.613955 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:15.615489 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:15.801741 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:15.975686 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.113469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.114978 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.301581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.475151 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:16.550481 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:16.614516 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:16.615278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:16.802546 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:16.975189 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.110944 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.111649 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.302100 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.475101 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:17.611759 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:17.612357 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:17.800825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:17.975226 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.110760 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.112805 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.300370 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.474527 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:18.610984 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:18.611944 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:18.801132 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:18.974591 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.046356 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:19.112245 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.115197 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.301744 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.475515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:19.610679 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:19.614216 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:19.801704 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:19.974949 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.111388 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.114141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.301219 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.474669 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:20.611319 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:20.615110 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:20.801384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:20.976136 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.113352 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.113988 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.300920 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.489778 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:21.545440 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:21.613554 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:21.616634 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:21.801820 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:21.977146 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.111094 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.112217 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.301825 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.475834 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:22.611602 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:37:22.612556 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:22.805363 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:22.975337 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.112472 1384589 kapi.go:107] duration metric: took 1m20.005670496s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:37:23.113515 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.300925 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.474515 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:23.610822 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:23.801408 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:23.977906 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.044059 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:24.117487 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.301384 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.476565 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:24.611373 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:24.801872 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:24.984901 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.111954 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.300421 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.475126 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:25.611267 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:25.808830 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:25.975068 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.111025 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.310954 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.475111 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:26.543709 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:26.609974 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:26.838995 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:26.975321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.110779 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.301198 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.476321 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:27.610748 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:27.801486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:27.975547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.110763 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.301469 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.474991 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:28.610943 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:28.801350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:28.975749 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.046127 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:29.110966 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.305494 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.475929 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:29.609824 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:29.801492 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:29.977852 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.113447 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.301994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.476258 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:30.610718 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:30.801712 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:30.975400 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.110916 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.300717 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.474547 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:31.542764 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:31.612339 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:31.804045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:31.975617 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.110961 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.300588 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.482569 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:32.611127 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:32.804201 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:32.975368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.111355 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.301816 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:33.477518 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:33.551472 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:33.611027 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:33.801158 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.013405 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.127200 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.310368 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.475923 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:34.611219 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:34.801913 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:34.978855 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.118452 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.300764 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.476873 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:35.611849 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:35.802246 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:35.975118 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.044866 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:36.111125 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.301167 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.477188 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:36.617190 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:36.801375 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:36.974623 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.113798 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.301345 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.479115 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:37.611187 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:37.802141 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:37.976103 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.094708 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:38.116394 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.300966 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.474752 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:38.610164 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:38.800561 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:38.975817 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.110879 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.301972 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.475982 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:39.614550 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:39.801870 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:39.975576 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.112781 1384589 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:37:40.301195 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.476921 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:40.543014 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:40.612230 1384589 kapi.go:107] duration metric: took 1m37.506412903s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:37:40.800501 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:40.980528 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.301899 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.478479 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:41.801278 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:41.975045 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.302225 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.487350 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:42.548067 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:42.806839 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:42.976392 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.300621 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.475884 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:43.802919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:43.975139 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.301415 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.475371 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:44.801688 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:44.975259 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.062166 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:45.301957 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.477003 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:45.802892 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:45.974994 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.301112 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.475372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:46.800784 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:46.974857 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.303524 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.475768 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:47.545443 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:47.800473 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:47.974841 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.301353 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.474781 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:48.800728 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:48.975372 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.301044 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.475307 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:49.801251 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:49.976296 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.044755 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:50.306461 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.478119 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:50.802508 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:50.975919 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.310303 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.475230 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:51.801606 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:51.975318 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.053332 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:52.302006 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:37:52.476486 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:52.801441 1384589 kapi.go:107] duration metric: took 1m44.50438368s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:37:52.803585 1384589 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-936355 cluster.
	I0916 10:37:52.805126 1384589 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:37:52.807003 1384589 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
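The gcp-auth messages above describe an opt-out label: pods carrying the `gcp-auth-skip-secret` key are not mounted with credentials. A minimal sketch of exercising that, assuming a throwaway nginx pod (the pod name and image are illustrative; only the label key comes from the log above):

  # Create a pod labeled to skip gcp-auth credential mounting
  kubectl --context addons-936355 run skip-demo \
    --image=nginx \
    --labels=gcp-auth-skip-secret=true

  # Inspect the pod's volumes to confirm no credentials were injected
  kubectl --context addons-936355 get pod skip-demo \
    -o jsonpath='{.spec.volumes[*].name}'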
	I0916 10:37:52.974797 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.475581 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:53.975250 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.474446 1384589 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:37:54.542561 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:54.975714 1384589 kapi.go:107] duration metric: took 1m51.5059929s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:37:54.976913 1384589 out.go:177] * Enabled addons: nvidia-device-plugin, ingress-dns, cloud-spanner, storage-provisioner, metrics-server, yakd, default-storageclass, inspektor-gadget, volumesnapshots, registry, ingress, gcp-auth, csi-hostpath-driver
	I0916 10:37:54.977951 1384589 addons.go:510] duration metric: took 1m58.330681209s for enable addons: enabled=[nvidia-device-plugin ingress-dns cloud-spanner storage-provisioner metrics-server yakd default-storageclass inspektor-gadget volumesnapshots registry ingress gcp-auth csi-hostpath-driver]
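With all thirteen addons reported enabled, the result can be checked from the host. A minimal sketch using the minikube CLI (the profile name is taken from the log; the --refresh flag is the one the gcp-auth message above recommends for existing pods):

  # List addon status for this test profile
  minikube -p addons-936355 addons list

  # Re-enable gcp-auth with --refresh so already-running pods
  # get credentials mounted, per the message above
  minikube -p addons-936355 addons enable gcp-auth --refresh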
	I0916 10:37:56.543286 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:37:58.543538 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:00.545466 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:03.044859 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:05.543384 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:08.044081 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:10.044862 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:12.543815 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:15.046388 1384589 pod_ready.go:103] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"False"
	I0916 10:38:17.044536 1384589 pod_ready.go:93] pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.044563 1384589 pod_ready.go:82] duration metric: took 1m32.00797612s for pod "metrics-server-84c5f94fbc-hngcs" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.044576 1384589 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054621 1384589 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:38:17.054646 1384589 pod_ready.go:82] duration metric: took 10.061393ms for pod "nvidia-device-plugin-daemonset-6j9gc" in "kube-system" namespace to be "Ready" ...
	I0916 10:38:17.054673 1384589 pod_ready.go:39] duration metric: took 1m34.442085136s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:38:17.054689 1384589 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:38:17.054724 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:17.054791 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:17.110909 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:17.110942 1384589 cri.go:89] found id: ""
	I0916 10:38:17.110950 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:17.111018 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.114542 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:17.114619 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:17.153834 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.153856 1384589 cri.go:89] found id: ""
	I0916 10:38:17.153864 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:17.153923 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.157470 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:17.157579 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:17.198133 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:17.198155 1384589 cri.go:89] found id: ""
	I0916 10:38:17.198163 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:17.198222 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.201699 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:17.201773 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:17.244177 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.244206 1384589 cri.go:89] found id: ""
	I0916 10:38:17.244215 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:17.244287 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.248238 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:17.248346 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:17.286359 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.286380 1384589 cri.go:89] found id: ""
	I0916 10:38:17.286388 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:17.286476 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.290475 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:17.290598 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:17.332786 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.332808 1384589 cri.go:89] found id: ""
	I0916 10:38:17.332817 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:17.332887 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.336545 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:17.336625 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:17.376900 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.376922 1384589 cri.go:89] found id: ""
	I0916 10:38:17.376930 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:17.376991 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:17.380608 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:17.380639 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:17.430005 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:17.430059 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:17.478918 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:17.478953 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:17.578588 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:17.578626 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:17.596725 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:17.596755 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:17.780455 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:17.780482 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:17.832701 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:17.832737 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:17.873549 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:17.873579 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:17.944894 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:17.944933 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:18.006230 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:18.006286 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:18.071787 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072057 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.072239 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.072456 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.075800 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.076027 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.087591 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.087896 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.088088 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.088320 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.128812 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:18.128841 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:18.186612 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:18.186644 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:18.233148 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233182 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:18.233388 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:18.233404 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233412 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233423 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:18.233429 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:18.233449 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:18.233461 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:18.233470 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:28.234697 1384589 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:38:28.249266 1384589 api_server.go:72] duration metric: took 2m31.602198408s to wait for apiserver process to appear ...
	I0916 10:38:28.249292 1384589 api_server.go:88] waiting for apiserver healthz status ...
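The healthz wait that begins here polls the apiserver's health endpoint until it returns ok. A hand-run equivalent, as a sketch only (port 8443 is minikube's default apiserver port, an assumption not stated in this log; -k skips certificate verification):

  # Query the liveness endpoint the wait loop polls
  curl -k "https://$(minikube -p addons-936355 ip):8443/healthz"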
	I0916 10:38:28.249329 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:28.249401 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:28.291513 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:28.291538 1384589 cri.go:89] found id: ""
	I0916 10:38:28.291546 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:28.291605 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.295282 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:28.295362 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:28.334381 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.334460 1384589 cri.go:89] found id: ""
	I0916 10:38:28.334479 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:28.334596 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.338232 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:28.338315 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:28.386465 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.386495 1384589 cri.go:89] found id: ""
	I0916 10:38:28.386503 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:28.386564 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.390431 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:28.390508 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:28.428479 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:28.428500 1384589 cri.go:89] found id: ""
	I0916 10:38:28.428508 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:28.428568 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.431936 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:28.432009 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:28.480074 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.480148 1384589 cri.go:89] found id: ""
	I0916 10:38:28.480171 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:28.480257 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.484845 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:28.484948 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:28.526872 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:28.526896 1384589 cri.go:89] found id: ""
	I0916 10:38:28.526905 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:28.526965 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.530520 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:28.530607 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:28.569037 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.569065 1384589 cri.go:89] found id: ""
	I0916 10:38:28.569074 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:28.569150 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:28.572604 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:28.572634 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:28.589298 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:28.589323 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:28.729585 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:28.729703 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:28.802248 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:28.802300 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:28.843099 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:28.843130 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:28.886320 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:28.886350 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:28.930299 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:28.930374 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:29.041608 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:29.041656 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:29.079590 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.079841 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.080020 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.080236 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.083646 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.083870 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095503 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.095743 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.095931 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.096162 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.147372 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:29.147401 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:29.214117 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:29.214148 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:29.266528 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:29.266562 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:29.339157 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:29.339193 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:29.402328 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402360 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:29.402421 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:29.402433 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402445 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402453 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:29.402464 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:29.402472 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:29.402483 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:29.402490 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:39.403739 1384589 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:38:39.411467 1384589 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:38:39.412460 1384589 api_server.go:141] control plane version: v1.31.1
	I0916 10:38:39.412486 1384589 api_server.go:131] duration metric: took 11.16318566s to wait for apiserver health ...
	I0916 10:38:39.412495 1384589 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:38:39.412517 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:38:39.412584 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:38:39.451224 1384589 cri.go:89] found id: "f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:39.451243 1384589 cri.go:89] found id: ""
	I0916 10:38:39.451251 1384589 logs.go:276] 1 containers: [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403]
	I0916 10:38:39.451311 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.454893 1384589 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:38:39.454968 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:38:39.499416 1384589 cri.go:89] found id: "3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.499439 1384589 cri.go:89] found id: ""
	I0916 10:38:39.499448 1384589 logs.go:276] 1 containers: [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5]
	I0916 10:38:39.499510 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.503122 1384589 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:38:39.503208 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:38:39.542014 1384589 cri.go:89] found id: "ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:39.542035 1384589 cri.go:89] found id: ""
	I0916 10:38:39.542043 1384589 logs.go:276] 1 containers: [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad]
	I0916 10:38:39.542101 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.546062 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:38:39.546152 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:38:39.587808 1384589 cri.go:89] found id: "2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:39.587831 1384589 cri.go:89] found id: ""
	I0916 10:38:39.587842 1384589 logs.go:276] 1 containers: [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25]
	I0916 10:38:39.587908 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.591371 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:38:39.591441 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:38:39.629404 1384589 cri.go:89] found id: "6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:39.629428 1384589 cri.go:89] found id: ""
	I0916 10:38:39.629437 1384589 logs.go:276] 1 containers: [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0]
	I0916 10:38:39.629495 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.633014 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:38:39.633091 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:38:39.676945 1384589 cri.go:89] found id: "4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:39.676965 1384589 cri.go:89] found id: ""
	I0916 10:38:39.676973 1384589 logs.go:276] 1 containers: [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436]
	I0916 10:38:39.677033 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.680612 1384589 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:38:39.680742 1384589 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:38:39.722262 1384589 cri.go:89] found id: "8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:39.722282 1384589 cri.go:89] found id: ""
	I0916 10:38:39.722291 1384589 logs.go:276] 1 containers: [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1]
	I0916 10:38:39.722347 1384589 ssh_runner.go:195] Run: which crictl
	I0916 10:38:39.726091 1384589 logs.go:123] Gathering logs for dmesg ...
	I0916 10:38:39.726167 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:38:39.742632 1384589 logs.go:123] Gathering logs for etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] ...
	I0916 10:38:39.742660 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5"
	I0916 10:38:39.814109 1384589 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:38:39.814142 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:38:39.914270 1384589 logs.go:123] Gathering logs for kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] ...
	I0916 10:38:39.914308 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436"
	I0916 10:38:40.019354 1384589 logs.go:123] Gathering logs for kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] ...
	I0916 10:38:40.019397 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1"
	I0916 10:38:40.079304 1384589 logs.go:123] Gathering logs for kubelet ...
	I0916 10:38:40.079345 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0916 10:38:40.123482 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171269    1507 reflector.go:561] object-"kube-system"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.123736 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171326    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.123917 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: W0916 10:35:59.171509    1507 reflector.go:561] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.124171 1384589 logs.go:138] Found kubelet problem: Sep 16 10:35:59 addons-936355 kubelet[1507]: E0916 10:35:59.171550    1507 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.127515 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: W0916 10:36:02.454965    1507 reflector.go:561] object-"gadget"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "gadget": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.127756 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139306 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139536 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.139726 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.139953 1384589 logs.go:138] Found kubelet problem: Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.192100 1384589 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:38:40.192138 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:38:40.333078 1384589 logs.go:123] Gathering logs for kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] ...
	I0916 10:38:40.333117 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403"
	I0916 10:38:40.403526 1384589 logs.go:123] Gathering logs for coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] ...
	I0916 10:38:40.403566 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad"
	I0916 10:38:40.442653 1384589 logs.go:123] Gathering logs for kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] ...
	I0916 10:38:40.442681 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25"
	I0916 10:38:40.492601 1384589 logs.go:123] Gathering logs for kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] ...
	I0916 10:38:40.492632 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0"
	I0916 10:38:40.533326 1384589 logs.go:123] Gathering logs for container status ...
	I0916 10:38:40.533357 1384589 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:38:40.587619 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587653 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0916 10:38:40.587735 1384589 out.go:270] X Problems detected in kubelet:
	W0916 10:38:40.587753 1384589 out.go:270]   Sep 16 10:36:02 addons-936355 kubelet[1507]: E0916 10:36:02.455028    1507 reflector.go:158] "Unhandled Error" err="object-\"gadget\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"gadget\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587783 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520421    1507 reflector.go:561] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587793 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520489    1507 reflector.go:158] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	W0916 10:38:40.587808 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: W0916 10:36:42.520540    1507 reflector.go:561] object-"local-path-storage"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:addons-936355" cannot list resource "configmaps" in API group "" in the namespace "local-path-storage": no relationship found between node 'addons-936355' and this object
	W0916 10:38:40.587820 1384589 out.go:270]   Sep 16 10:36:42 addons-936355 kubelet[1507]: E0916 10:36:42.520560    1507 reflector.go:158] "Unhandled Error" err="object-\"local-path-storage\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:addons-936355\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"local-path-storage\": no relationship found between node 'addons-936355' and this object" logger="UnhandledError"
	I0916 10:38:40.587827 1384589 out.go:358] Setting ErrFile to fd 2...
	I0916 10:38:40.587838 1384589 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:38:50.602620 1384589 system_pods.go:59] 18 kube-system pods found
	I0916 10:38:50.602695 1384589 system_pods.go:61] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.602715 1384589 system_pods.go:61] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.602720 1384589 system_pods.go:61] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.602728 1384589 system_pods.go:61] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.602736 1384589 system_pods.go:61] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.602745 1384589 system_pods.go:61] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.602749 1384589 system_pods.go:61] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.602753 1384589 system_pods.go:61] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.602762 1384589 system_pods.go:61] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.602767 1384589 system_pods.go:61] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.602771 1384589 system_pods.go:61] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.602775 1384589 system_pods.go:61] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.602794 1384589 system_pods.go:61] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.602798 1384589 system_pods.go:61] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.602813 1384589 system_pods.go:61] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.602821 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.602825 1384589 system_pods.go:61] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.602832 1384589 system_pods.go:61] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.602848 1384589 system_pods.go:74] duration metric: took 11.190345697s to wait for pod list to return data ...
	I0916 10:38:50.602873 1384589 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:38:50.606360 1384589 default_sa.go:45] found service account: "default"
	I0916 10:38:50.606391 1384589 default_sa.go:55] duration metric: took 3.50956ms for default service account to be created ...
	I0916 10:38:50.606400 1384589 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:38:50.616619 1384589 system_pods.go:86] 18 kube-system pods found
	I0916 10:38:50.616661 1384589 system_pods.go:89] "coredns-7c65d6cfc9-r6x6b" [fc313ec6-5b9a-444f-ae74-8a9d31bad075] Running
	I0916 10:38:50.616668 1384589 system_pods.go:89] "csi-hostpath-attacher-0" [973b3dd3-b66c-4f66-a499-e50893dc0d35] Running
	I0916 10:38:50.617624 1384589 system_pods.go:89] "csi-hostpath-resizer-0" [51405fd6-eaa1-4b53-ab6c-fc127aa3e3ed] Running
	I0916 10:38:50.617646 1384589 system_pods.go:89] "csi-hostpathplugin-zrlmd" [86e81bf7-3587-41e4-a08a-e800ecc90538] Running
	I0916 10:38:50.617652 1384589 system_pods.go:89] "etcd-addons-936355" [354ae326-d376-4f6f-805d-2605645d8d04] Running
	I0916 10:38:50.617662 1384589 system_pods.go:89] "kindnet-wv5d6" [35e2a463-84e1-4b51-8b1d-2f07b7677069] Running
	I0916 10:38:50.617668 1384589 system_pods.go:89] "kube-apiserver-addons-936355" [397fd8ae-a57b-462d-9c08-d0d45236f3b0] Running
	I0916 10:38:50.617673 1384589 system_pods.go:89] "kube-controller-manager-addons-936355" [d2285801-6e4d-4f4f-a300-721484f9834e] Running
	I0916 10:38:50.617677 1384589 system_pods.go:89] "kube-ingress-dns-minikube" [cfe0a31e-4a7c-4260-9320-4d769706f403] Running
	I0916 10:38:50.617682 1384589 system_pods.go:89] "kube-proxy-6zqlq" [c2680a6c-7cc0-48d6-8094-2d804da5c90b] Running
	I0916 10:38:50.617686 1384589 system_pods.go:89] "kube-scheduler-addons-936355" [881986a3-b57c-4fd3-bd1e-c796e39d9a39] Running
	I0916 10:38:50.617691 1384589 system_pods.go:89] "metrics-server-84c5f94fbc-hngcs" [5901d847-eeb7-4c71-97ba-d08734fb39ed] Running
	I0916 10:38:50.617696 1384589 system_pods.go:89] "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
	I0916 10:38:50.617701 1384589 system_pods.go:89] "registry-66c9cd494c-xh5d4" [6f439a0d-4e84-4ea2-97ef-2666b73327b7] Running
	I0916 10:38:50.617705 1384589 system_pods.go:89] "registry-proxy-xdksj" [f3007abe-d474-44b8-91de-56f1d2dc83a9] Running
	I0916 10:38:50.617716 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-5th26" [00fbb682-4a60-4b76-84a9-4b0d4669fc20] Running
	I0916 10:38:50.617730 1384589 system_pods.go:89] "snapshot-controller-56fcc65765-fjrw9" [1eb3d0c0-5ee6-493b-ab86-8b96ac9e4110] Running
	I0916 10:38:50.617734 1384589 system_pods.go:89] "storage-provisioner" [1b62a2a2-7b11-4305-99cc-88c5a411f505] Running
	I0916 10:38:50.617742 1384589 system_pods.go:126] duration metric: took 11.335042ms to wait for k8s-apps to be running ...
	I0916 10:38:50.617754 1384589 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:38:50.617812 1384589 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:38:50.630041 1384589 system_svc.go:56] duration metric: took 12.276523ms WaitForService to wait for kubelet
	I0916 10:38:50.630069 1384589 kubeadm.go:582] duration metric: took 2m53.983006463s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:38:50.630088 1384589 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:38:50.633754 1384589 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:38:50.633790 1384589 node_conditions.go:123] node cpu capacity is 2
	I0916 10:38:50.633806 1384589 node_conditions.go:105] duration metric: took 3.708685ms to run NodePressure ...
	I0916 10:38:50.633819 1384589 start.go:241] waiting for startup goroutines ...
	I0916 10:38:50.633826 1384589 start.go:246] waiting for cluster config update ...
	I0916 10:38:50.633842 1384589 start.go:255] writing updated cluster config ...
	I0916 10:38:50.634158 1384589 ssh_runner.go:195] Run: rm -f paused
	I0916 10:38:50.643301 1384589 out.go:177] * Done! kubectl is now configured to use "addons-936355" cluster and "default" namespace by default
	E0916 10:38:50.646536 1384589 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
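
Note on the error above: "fork/exec /usr/local/bin/kubectl: exec format error" means the kernel refused to execute the kubectl binary because it was built for a different CPU architecture than this arm64 host, so every test step that shells out to kubectl fails the same way. A minimal diagnostic sketch, not part of the test run (the dl.k8s.io path is the standard upstream release URL pattern, and v1.31.1 matches the control plane version logged above):

    # Compare the binary's architecture against the host's.
    file /usr/local/bin/kubectl   # an x86-64 ELF here would explain the error
    uname -m                      # expected on this host: aarch64

    # Fetch the matching arm64 build of the same version and swap it in.
    curl -LO https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl
    sudo install -m 0755 kubectl /usr/local/bin/kubectl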
	
	
	==> CRI-O <==
	Sep 16 10:39:52 addons-936355 crio[961]: time="2024-09-16 10:39:52.210394470Z" level=info msg="Creating container: gadget/gadget-hx2qq/gadget" id=8534bd4e-4454-42c7-89c6-6bb89bba019b name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:39:52 addons-936355 crio[961]: time="2024-09-16 10:39:52.210492355Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:39:52 addons-936355 crio[961]: time="2024-09-16 10:39:52.277200542Z" level=info msg="Created container d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f: gadget/gadget-hx2qq/gadget" id=8534bd4e-4454-42c7-89c6-6bb89bba019b name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:39:52 addons-936355 crio[961]: time="2024-09-16 10:39:52.278002129Z" level=info msg="Starting container: d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f" id=f03e1810-67cb-46b5-85ff-75bf18d627bc name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:39:52 addons-936355 crio[961]: time="2024-09-16 10:39:52.285827711Z" level=info msg="Started container" PID=6185 containerID=d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f description=gadget/gadget-hx2qq/gadget id=f03e1810-67cb-46b5-85ff-75bf18d627bc name=/runtime.v1.RuntimeService/StartContainer sandboxID=cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769
	Sep 16 10:39:53 addons-936355 conmon[6174]: conmon d36df2407ca5e1100ca9 <ninfo>: container 6185 exited with status 1
	Sep 16 10:39:53 addons-936355 crio[961]: time="2024-09-16 10:39:53.862367788Z" level=info msg="Removing container: b3fda3bbc6527bab3713f7ee920de180eb00d2705006c750054ca0da818f0982" id=feca78d4-768d-48ab-b181-c4a19536a2cc name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:39:53 addons-936355 crio[961]: time="2024-09-16 10:39:53.891029644Z" level=info msg="Removed container b3fda3bbc6527bab3713f7ee920de180eb00d2705006c750054ca0da818f0982: gadget/gadget-hx2qq/gadget" id=feca78d4-768d-48ab-b181-c4a19536a2cc name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.951968254Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=ade1caf6-2145-4e84-81e9-3f71b003b2c7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.952231508Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=ade1caf6-2145-4e84-81e9-3f71b003b2c7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.953062633Z" level=info msg="Pulling image: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=aec7eb7a-34f5-4a54-8544-6d4473e524d8 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:42:40 addons-936355 crio[961]: time="2024-09-16 10:42:40.955317303Z" level=info msg="Trying to access \"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\""
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.190458782Z" level=info msg="Pulled image: ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=aec7eb7a-34f5-4a54-8544-6d4473e524d8 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191172133Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.191407713Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=f8aaafe6-9303-4892-941c-fe3f3cbab9c5 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192237878Z" level=info msg="Checking image status: ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.192468518Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd,RepoTags:[],RepoDigests:[ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec ghcr.io/inspektor-gadget/inspektor-gadget@sha256:7bb75e6a6a00e80a93c6115d94a22482eba22ee957f22e34e0b2310fc3a1391d],Size_:171509623,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=83481a4a-c4dd-4cb0-9f55-4cb8a97d2f56 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193197779Z" level=info msg="Creating container: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.193291028Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257217560Z" level=info msg="Created container bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66: gadget/gadget-hx2qq/gadget" id=9b815aa8-db37-4a38-bb71-3d1e33129027 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.257933316Z" level=info msg="Starting container: bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66" id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:42:41 addons-936355 crio[961]: time="2024-09-16 10:42:41.266608200Z" level=info msg="Started container" PID=6346 containerID=bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66 description=gadget/gadget-hx2qq/gadget id=ee8c419a-09c7-4f67-bb0e-42b94ffd6d3c name=/runtime.v1.RuntimeService/StartContainer sandboxID=cf56dfeabe5decbedd58fc457dc7719d29c93fc1ac2509ce2b409125c237d769
	Sep 16 10:42:42 addons-936355 conmon[6335]: conmon bcf51d70eaf49387d9ea <ninfo>: container 6346 exited with status 1
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.272327618Z" level=info msg="Removing container: d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:42:43 addons-936355 crio[961]: time="2024-09-16 10:42:43.294374970Z" level=info msg="Removed container d36df2407ca5e1100ca95fde6e52ae6ca976aeb3d1ff54fe7ca517a91ca9c88f: gadget/gadget-hx2qq/gadget" id=5e037ab7-a3fb-48d4-8ceb-27378ea04007 name=/runtime.v1.RuntimeService/RemoveContainer
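
Note on the CRI-O log above: the gadget container is crash-looping; CRI-O creates it, conmon reports it exiting with status 1 within a couple of seconds, and the previous instance is removed before the next attempt (attempt 6 by the container-status listing below). A minimal sketch for inspecting it on the node, reusing the same crictl commands the log gatherer runs above (the container ID is the one from this run and will differ on a re-run):

    sudo crictl ps -a --name=gadget           # state Exited, rising ATTEMPT count
    sudo crictl logs --tail 50 bcf51d70eaf49  # last output before the status-1 exit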
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                                                        CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	bcf51d70eaf49       ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec                            2 minutes ago       Exited              gadget                                   6                   cf56dfeabe5de       gadget-hx2qq
	3b30e9b80217f       registry.k8s.io/sig-storage/csi-snapshotter@sha256:291334908ddf71a4661fd7f6d9d97274de8a5378a2b6fdfeb2ce73414a34f82f                          7 minutes ago       Running             csi-snapshotter                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	66246ecfc47d6       gcr.io/k8s-minikube/gcp-auth-webhook@sha256:a40e1a121ee367d1712ac3a54ec9c38c405a65dde923c98e5fa6368fa82c4b69                                 7 minutes ago       Running             gcp-auth                                 0                   b09347ee3cb04       gcp-auth-89d5ffd79-j2ckg
	5dabae8faaade       registry.k8s.io/sig-storage/csi-provisioner@sha256:98ffd09c0784203d200e0f8c241501de31c8df79644caac7eed61bd6391e5d49                          7 minutes ago       Running             csi-provisioner                          0                   32259548d9366       csi-hostpathplugin-zrlmd
	63d680209bdeb       registry.k8s.io/sig-storage/livenessprobe@sha256:8b00c6e8f52639ed9c6f866085893ab688e57879741b3089e3cfa9998502e158                            7 minutes ago       Running             liveness-probe                           0                   32259548d9366       csi-hostpathplugin-zrlmd
	b241211876358       registry.k8s.io/sig-storage/hostpathplugin@sha256:7b1dfc90a367222067fc468442fdf952e20fc5961f25c1ad654300ddc34d7083                           7 minutes ago       Running             hostpath                                 0                   32259548d9366       csi-hostpathplugin-zrlmd
	ab8eaedf8040a       registry.k8s.io/sig-storage/csi-node-driver-registrar@sha256:511b8c8ac828194a753909d26555ff08bc12f497dd8daeb83fe9d593693a26c1                7 minutes ago       Running             node-driver-registrar                    0                   32259548d9366       csi-hostpathplugin-zrlmd
	331ea01abf2ed       registry.k8s.io/ingress-nginx/controller@sha256:22f9d129ae8c89a2cabbd13af3c1668944f3dd68fec186199b7024a0a2fc75b3                             7 minutes ago       Running             controller                               0                   549ac22ef6389       ingress-nginx-controller-bc57996ff-jgfjf
	5e5f91a726842       docker.io/rancher/local-path-provisioner@sha256:689a2489a24e74426e4a4666e611c988202c5fa995908b0c60133aca3eb87d98                             7 minutes ago       Running             local-path-provisioner                   0                   0d353b19ef8b9       local-path-provisioner-86d989889c-b652d
	9773c25a0a3dc       gcr.io/cloud-spanner-emulator/emulator@sha256:41ec188288c7943f488600462b2b74002814e52439be82d15de33c3ee4898a58                               7 minutes ago       Running             cloud-spanner-emulator                   0                   32e89c2c5a56d       cloud-spanner-emulator-769b77f747-qvhhc
	3d28641a10686       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              patch                                    0                   ae4e1f0886d62       ingress-nginx-admission-patch-5hvnf
	98ee5c554b6be       registry.k8s.io/sig-storage/csi-external-health-monitor-controller@sha256:80b9ba94aa2afe24553d69bd165a6a51552d1582d68618ec00d3b804a7d9193c   7 minutes ago       Running             csi-external-health-monitor-controller   0                   32259548d9366       csi-hostpathplugin-zrlmd
	11f6f0bf554a7       registry.k8s.io/sig-storage/csi-resizer@sha256:425d8f1b769398127767b06ed97ce62578a3179bcb99809ce93a1649e025ffe7                              7 minutes ago       Running             csi-resizer                              0                   b35d742443216       csi-hostpath-resizer-0
	b9e189d1acd4c       registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:7c4c1a6ca8855c524a64983eaf590e126a669ae12df83ad65de281c9beee13d3                   7 minutes ago       Exited              create                                   0                   ef2a1639e8386       ingress-nginx-admission-create-kmjkm
	b65d0d4cafeec       registry.k8s.io/metrics-server/metrics-server@sha256:048bcf48fc2cce517a61777e22bac782ba59ea5e9b9a54bcb42dbee99566a91f                        7 minutes ago       Running             metrics-server                           0                   d015a3419dfc0       metrics-server-84c5f94fbc-hngcs
	fe7a31fb7fe71       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   54dbfb69eabc4       snapshot-controller-56fcc65765-5th26
	082cee4b81438       docker.io/marcnuri/yakd@sha256:1c961556224d57fc747de0b1874524208e5fb4f8386f23e9c1c4c18e97109f17                                              7 minutes ago       Running             yakd                                     0                   91ccf72a05daa       yakd-dashboard-67d98fc6b-ztsj8
	4cf01aeaccd3f       registry.k8s.io/sig-storage/snapshot-controller@sha256:5d668e35c15df6e87e2530da25d557f543182cedbdb39d421b87076463ee9857                      7 minutes ago       Running             volume-snapshot-controller               0                   29551751a8a3b       snapshot-controller-56fcc65765-fjrw9
	d5f8b279203cd       nvcr.io/nvidia/k8s-device-plugin@sha256:cdd05f9d89f0552478d46474005e86b98795ad364664f644225b99d94978e680                                     7 minutes ago       Running             nvidia-device-plugin-ctr                 0                   eda9663f4feb4       nvidia-device-plugin-daemonset-6j9gc
	d50b4977768d7       registry.k8s.io/sig-storage/csi-attacher@sha256:4b5609c78455de45821910065281a368d5f760b41250f90cbde5110543bdc326                             8 minutes ago       Running             csi-attacher                             0                   6f989f68a9599       csi-hostpath-attacher-0
	198a1da1f3633       gcr.io/k8s-minikube/minikube-ingress-dns@sha256:4211a1de532376c881851542238121b26792225faa36a7b02dccad88fd05797c                             8 minutes ago       Running             minikube-ingress-dns                     0                   69f4c5e690a85       kube-ingress-dns-minikube
	ee934dc9f4f92       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                                             8 minutes ago       Running             coredns                                  0                   d4b44085e648e       coredns-7c65d6cfc9-r6x6b
	2a862ef326432       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                                             8 minutes ago       Running             storage-provisioner                      0                   e168c388c9d11       storage-provisioner
	8d59e894feca0       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                                             8 minutes ago       Running             kindnet-cni                              0                   ca9fcc6465180       kindnet-wv5d6
	6200eb5cfcd24       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                                             8 minutes ago       Running             kube-proxy                               0                   a491da0967548       kube-proxy-6zqlq
	2b161087caf5a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                                             9 minutes ago       Running             kube-scheduler                           0                   c99e3a64f4ade       kube-scheduler-addons-936355
	4ee66eef50ab6       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                                             9 minutes ago       Running             kube-controller-manager                  0                   70ee024a23a5b       kube-controller-manager-addons-936355
	f911db1ed55bb       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                                                             9 minutes ago       Running             kube-apiserver                           0                   fe5dcd273af65       kube-apiserver-addons-936355
	3b247261f15f4       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                                             9 minutes ago       Running             etcd                                     0                   24ef782ab6be4       etcd-addons-936355
	
	
	==> coredns [ee934dc9f4f92e52b49ad02508bb42771f460a2494fa8b1a65d888191266a4ad] <==
	[INFO] 10.244.0.6:41410 - 64521 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000074082s
	[INFO] 10.244.0.6:32998 - 54705 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002300443s
	[INFO] 10.244.0.6:32998 - 29583 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002118081s
	[INFO] 10.244.0.6:57466 - 59415 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000171687s
	[INFO] 10.244.0.6:57466 - 26377 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000100124s
	[INFO] 10.244.0.6:57769 - 49607 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000092608s
	[INFO] 10.244.0.6:57769 - 14275 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.00007117s
	[INFO] 10.244.0.6:44055 - 7650 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000056876s
	[INFO] 10.244.0.6:44055 - 57820 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000036053s
	[INFO] 10.244.0.6:42734 - 36918 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000045283s
	[INFO] 10.244.0.6:42734 - 61736 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000033599s
	[INFO] 10.244.0.6:54338 - 26081 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001812112s
	[INFO] 10.244.0.6:54338 - 40423 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001769553s
	[INFO] 10.244.0.6:39094 - 56002 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000048811s
	[INFO] 10.244.0.6:39094 - 9935 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000036265s
	[INFO] 10.244.0.20:53754 - 1366 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.0016936s
	[INFO] 10.244.0.20:51144 - 45189 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.002085581s
	[INFO] 10.244.0.20:60186 - 495 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000131541s
	[INFO] 10.244.0.20:58173 - 47948 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000146031s
	[INFO] 10.244.0.20:41557 - 45319 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000125396s
	[INFO] 10.244.0.20:60168 - 27262 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00012213s
	[INFO] 10.244.0.20:55951 - 7020 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.004693068s
	[INFO] 10.244.0.20:46529 - 17954 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.005735124s
	[INFO] 10.244.0.20:54136 - 20848 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001853292s
	[INFO] 10.244.0.20:59146 - 51848 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.0025582s
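
Note on the coredns log above: the NXDOMAIN answers are expected search-path expansion, not resolution failures. Pod DNS defaults to options ndots:5, so a name like registry.kube-system.svc.cluster.local is retried with each search suffix (including the host's us-east-2.compute.internal domain, visible in the queries) before the absolute name returns NOERROR. An illustrative pod /etc/resolv.conf under those defaults; the exact values are assumptions, since the kubelet writes the real file:

    cat /etc/resolv.conf
    # search kube-system.svc.cluster.local svc.cluster.local cluster.local us-east-2.compute.internal
    # nameserver 10.96.0.10        <- conventional kube-dns ClusterIP, assumed here
    # options ndots:5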
	
	
	==> describe nodes <==
	Name:               addons-936355
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-936355
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-936355
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_35_52_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-936355
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-936355"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:35:49 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-936355
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:44:54 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:35:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:43:30 +0000   Mon, 16 Sep 2024 10:36:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-936355
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d04f59375248444681829ec487634926
	  System UUID:                65d15a11-4f3c-4207-941c-6a3b096d7c27
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (22 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-qvhhc     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m54s
	  gadget                      gadget-hx2qq                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m52s
	  gcp-auth                    gcp-auth-89d5ffd79-j2ckg                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m46s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-jgfjf    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         8m52s
	  kube-system                 coredns-7c65d6cfc9-r6x6b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     8m56s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m51s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m51s
	  kube-system                 csi-hostpathplugin-zrlmd                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m12s
	  kube-system                 etcd-addons-936355                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         9m2s
	  kube-system                 kindnet-wv5d6                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      8m56s
	  kube-system                 kube-apiserver-addons-936355                250m (12%)    0 (0%)      0 (0%)           0 (0%)         9m3s
	  kube-system                 kube-controller-manager-addons-936355       200m (10%)    0 (0%)      0 (0%)           0 (0%)         9m2s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m53s
	  kube-system                 kube-proxy-6zqlq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m57s
	  kube-system                 kube-scheduler-addons-936355                100m (5%)     0 (0%)      0 (0%)           0 (0%)         9m2s
	  kube-system                 metrics-server-84c5f94fbc-hngcs             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         8m53s
	  kube-system                 nvidia-device-plugin-daemonset-6j9gc        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m12s
	  kube-system                 snapshot-controller-56fcc65765-5th26        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m51s
	  kube-system                 snapshot-controller-56fcc65765-fjrw9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m51s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m52s
	  local-path-storage          local-path-provisioner-86d989889c-b652d     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m52s
	  yakd-dashboard              yakd-dashboard-67d98fc6b-ztsj8              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     8m52s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             638Mi (8%)   476Mi (6%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                   From             Message
	  ----     ------                   ----                  ----             -------
	  Normal   Starting                 8m52s                 kube-proxy       
	  Normal   NodeHasSufficientMemory  9m9s (x8 over 9m10s)  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m9s (x8 over 9m10s)  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m9s (x7 over 9m10s)  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   Starting                 9m3s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 9m3s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  9m2s                  kubelet          Node addons-936355 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m2s                  kubelet          Node addons-936355 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m2s                  kubelet          Node addons-936355 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           8m58s                 node-controller  Node addons-936355 event: Registered Node addons-936355 in Controller
	  Normal   NodeReady                8m12s                 kubelet          Node addons-936355 status is now: NodeReady
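
Note: the CgroupV1 warning in the events above is informational; kubelet v1.31 still runs on cgroup v1 but treats it as maintenance mode. A minimal way to confirm which cgroup version the node is actually on (a sketch, assuming GNU stat is present inside the kicbase image):

  # prints "cgroup2fs" on a cgroup v2 host, "tmpfs" on a v1 hierarchy
  out/minikube-linux-arm64 -p addons-936355 ssh -- stat -fc %T /sys/fs/cgroup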
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5] <==
	{"level":"warn","ts":"2024-09-16T10:35:59.461521Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"173.42953ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/serviceaccounts\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:35:59.461548Z","caller":"traceutil/trace.go:171","msg":"trace[825824076] range","detail":"{range_begin:/registry/serviceaccounts; range_end:; response_count:0; response_revision:365; }","duration":"173.471351ms","start":"2024-09-16T10:35:59.288071Z","end":"2024-09-16T10:35:59.461542Z","steps":["trace[825824076] 'agreement among raft nodes before linearized reading'  (duration: 173.394077ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910299Z","caller":"traceutil/trace.go:171","msg":"trace[571848] transaction","detail":"{read_only:false; response_revision:372; number_of_response:1; }","duration":"101.485416ms","start":"2024-09-16T10:35:59.808786Z","end":"2024-09-16T10:35:59.910272Z","steps":["trace[571848] 'process raft request'  (duration: 72.962753ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910551Z","caller":"traceutil/trace.go:171","msg":"trace[2049811000] transaction","detail":"{read_only:false; response_revision:373; number_of_response:1; }","duration":"101.622964ms","start":"2024-09-16T10:35:59.808918Z","end":"2024-09-16T10:35:59.910541Z","steps":["trace[2049811000] 'process raft request'  (duration: 72.910972ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:35:59.910806Z","caller":"traceutil/trace.go:171","msg":"trace[1901548869] transaction","detail":"{read_only:false; response_revision:374; number_of_response:1; }","duration":"101.844209ms","start":"2024-09-16T10:35:59.808954Z","end":"2024-09-16T10:35:59.910798Z","steps":["trace[1901548869] 'process raft request'  (duration: 72.897089ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945205Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.325816ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:35:59.945344Z","caller":"traceutil/trace.go:171","msg":"trace[1851060564] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:375; }","duration":"136.4823ms","start":"2024-09-16T10:35:59.808847Z","end":"2024-09-16T10:35:59.945330Z","steps":["trace[1851060564] 'agreement among raft nodes before linearized reading'  (duration: 136.289206ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:35:59.945577Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"136.842881ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods/kube-system/kube-proxy-6zqlq\" ","response":"range_response_count:1 size:4833"}
	{"level":"info","ts":"2024-09-16T10:35:59.945682Z","caller":"traceutil/trace.go:171","msg":"trace[840636989] range","detail":"{range_begin:/registry/pods/kube-system/kube-proxy-6zqlq; range_end:; response_count:1; response_revision:375; }","duration":"136.945081ms","start":"2024-09-16T10:35:59.808725Z","end":"2024-09-16T10:35:59.945670Z","steps":["trace[840636989] 'agreement among raft nodes before linearized reading'  (duration: 136.808125ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.052890Z","caller":"traceutil/trace.go:171","msg":"trace[935433541] transaction","detail":"{read_only:false; response_revision:376; number_of_response:1; }","duration":"171.425064ms","start":"2024-09-16T10:35:59.881432Z","end":"2024-09-16T10:36:00.052857Z","steps":["trace[935433541] 'process raft request'  (duration: 103.374661ms)","trace[935433541] 'compare'  (duration: 67.370586ms)"],"step_count":2}
	{"level":"info","ts":"2024-09-16T10:36:00.053251Z","caller":"traceutil/trace.go:171","msg":"trace[1640692462] linearizableReadLoop","detail":"{readStateIndex:386; appliedIndex:385; }","duration":"171.181083ms","start":"2024-09-16T10:35:59.882059Z","end":"2024-09-16T10:36:00.053240Z","steps":["trace[1640692462] 'read index received'  (duration: 86.984477ms)","trace[1640692462] 'applied index is now lower than readState.Index'  (duration: 84.173345ms)"],"step_count":2}
	{"level":"warn","ts":"2024-09-16T10:36:00.082458Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"201.082081ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/specs/default/cloud-spanner-emulator\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.131166Z","caller":"traceutil/trace.go:171","msg":"trace[1850329919] range","detail":"{range_begin:/registry/services/specs/default/cloud-spanner-emulator; range_end:; response_count:0; response_revision:379; }","duration":"249.789246ms","start":"2024-09-16T10:35:59.881352Z","end":"2024-09-16T10:36:00.131141Z","steps":["trace[1850329919] 'agreement among raft nodes before linearized reading'  (duration: 201.06412ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.081165Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"179.917297ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/apiextensions.k8s.io/customresourcedefinitions\" limit:1 ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2024-09-16T10:36:00.139019Z","caller":"traceutil/trace.go:171","msg":"trace[179530847] range","detail":"{range_begin:/registry/apiextensions.k8s.io/customresourcedefinitions; range_end:; response_count:0; response_revision:377; }","duration":"257.611311ms","start":"2024-09-16T10:35:59.881381Z","end":"2024-09-16T10:36:00.138992Z","steps":["trace[179530847] 'agreement among raft nodes before linearized reading'  (duration: 179.875904ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.497778Z","caller":"traceutil/trace.go:171","msg":"trace[781374587] transaction","detail":"{read_only:false; response_revision:383; number_of_response:1; }","duration":"244.927422ms","start":"2024-09-16T10:36:00.252822Z","end":"2024-09-16T10:36:00.497749Z","steps":["trace[781374587] 'process raft request'  (duration: 240.255139ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498033Z","caller":"traceutil/trace.go:171","msg":"trace[2049862755] transaction","detail":"{read_only:false; response_revision:384; number_of_response:1; }","duration":"245.149988ms","start":"2024-09-16T10:36:00.252873Z","end":"2024-09-16T10:36:00.498023Z","steps":["trace[2049862755] 'process raft request'  (duration: 243.936212ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498257Z","caller":"traceutil/trace.go:171","msg":"trace[1308392049] transaction","detail":"{read_only:false; response_revision:385; number_of_response:1; }","duration":"245.371382ms","start":"2024-09-16T10:36:00.252875Z","end":"2024-09-16T10:36:00.498247Z","steps":["trace[1308392049] 'process raft request'  (duration: 243.967662ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:36:00.498461Z","caller":"traceutil/trace.go:171","msg":"trace[1919696831] transaction","detail":"{read_only:false; response_revision:386; number_of_response:1; }","duration":"245.376936ms","start":"2024-09-16T10:36:00.253076Z","end":"2024-09-16T10:36:00.498453Z","steps":["trace[1919696831] 'process raft request'  (duration: 243.813828ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.508772Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"105.013609ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/namespaces/kube-system\" ","response":"range_response_count:1 size:351"}
	{"level":"info","ts":"2024-09-16T10:36:00.508863Z","caller":"traceutil/trace.go:171","msg":"trace[1394685121] range","detail":"{range_begin:/registry/namespaces/kube-system; range_end:; response_count:1; response_revision:394; }","duration":"105.116096ms","start":"2024-09-16T10:36:00.403731Z","end":"2024-09-16T10:36:00.508847Z","steps":["trace[1394685121] 'agreement among raft nodes before linearized reading'  (duration: 104.925356ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.510822Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"106.978552ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/ranges/serviceips\" ","response":"range_response_count:1 size:116"}
	{"level":"info","ts":"2024-09-16T10:36:00.510873Z","caller":"traceutil/trace.go:171","msg":"trace[90254389] range","detail":"{range_begin:/registry/ranges/serviceips; range_end:; response_count:1; response_revision:394; }","duration":"107.038374ms","start":"2024-09-16T10:36:00.403822Z","end":"2024-09-16T10:36:00.510860Z","steps":["trace[90254389] 'agreement among raft nodes before linearized reading'  (duration: 106.927616ms)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:36:00.513542Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"109.80734ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/daemonsets/kube-system/kindnet\" ","response":"range_response_count:1 size:4681"}
	{"level":"info","ts":"2024-09-16T10:36:00.513613Z","caller":"traceutil/trace.go:171","msg":"trace[1039894144] range","detail":"{range_begin:/registry/daemonsets/kube-system/kindnet; range_end:; response_count:1; response_revision:395; }","duration":"109.886707ms","start":"2024-09-16T10:36:00.403712Z","end":"2024-09-16T10:36:00.513599Z","steps":["trace[1039894144] 'agreement among raft nodes before linearized reading'  (duration: 109.778304ms)"],"step_count":1}
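
Note: each "apply request took too long" warning above is etcd flagging a request that exceeded its 100ms expected-duration budget; the slowest here is ~258ms during the 10:35-10:36 addon bring-up, which usually points at disk contention on a shared CI host rather than a cluster fault. A rough count of these warnings without kubectl, reusing the container ID from this section's header (a sketch, assuming crictl is present in the node image):

  out/minikube-linux-arm64 -p addons-936355 ssh -- sudo crictl logs 3b247261f15f4cdd596d5e7ee3354c24cb995a27a5e0581e877596df04b900d5 2>&1 | grep -c 'took too long'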
	
	
	==> gcp-auth [66246ecfc47d65d522c45cff2baf15e2433dc0e0681c400a1437f7890b27b5b4] <==
	2024/09/16 10:37:52 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:44:54 up 10:27,  0 users,  load average: 0.06, 0.74, 1.61
	Linux addons-936355 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [8d59e894feca0e01e03cc7257c67ed10cf0f9db194b88b314e4961bc62d9e7f1] <==
	I0916 10:42:52.020927       1 main.go:299] handling current node
	I0916 10:43:02.017772       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:02.017806       1 main.go:299] handling current node
	I0916 10:43:12.017271       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:12.017425       1 main.go:299] handling current node
	I0916 10:43:22.020176       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:22.020291       1 main.go:299] handling current node
	I0916 10:43:32.018190       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:32.018318       1 main.go:299] handling current node
	I0916 10:43:42.017224       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:42.017265       1 main.go:299] handling current node
	I0916 10:43:52.020877       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:52.021004       1 main.go:299] handling current node
	I0916 10:44:02.018165       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:02.018203       1 main.go:299] handling current node
	I0916 10:44:12.017210       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:12.017340       1 main.go:299] handling current node
	I0916 10:44:22.017201       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:22.017236       1 main.go:299] handling current node
	I0916 10:44:32.025659       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:32.025697       1 main.go:299] handling current node
	I0916 10:44:42.017259       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:42.017354       1 main.go:299] handling current node
	I0916 10:44:52.019299       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:52.019335       1 main.go:299] handling current node
	
	
	==> kube-apiserver [f911db1ed55bbf8b3dc28ca0fef7e51209be97baaa15d9194b879451dd6fd403] <==
	I0916 10:37:03.362794       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:37:03.362849       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	E0916 10:38:17.001292       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: connect: connection refused" logger="UnhandledError"
	W0916 10:38:17.001510       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:17.001593       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	W0916 10:38:18.005200       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005260       1 controller.go:113] "Unhandled Error" err="loading OpenAPI spec for \"v1beta1.metrics.k8s.io\" failed with: Error, could not get list of group versions for APIService" logger="UnhandledError"
	W0916 10:38:18.005310       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:18.005375       1 controller.go:102] "Unhandled Error" err=<
		loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:18.006688       1 controller.go:126] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0916 10:38:18.006752       1 controller.go:109] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	W0916 10:38:22.012823       1 handler_proxy.go:99] no RequestInfo found in the context
	E0916 10:38:22.012831       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: failing or missing response from https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1: Get \"https://10.100.138.161:443/apis/metrics.k8s.io/v1beta1\": dial tcp 10.100.138.161:443: i/o timeout" logger="UnhandledError"
	E0916 10:38:22.012998       1 controller.go:146] "Unhandled Error" err=<
		Error updating APIService "v1beta1.metrics.k8s.io" with err: failed to download v1beta1.metrics.k8s.io: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
		, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	 > logger="UnhandledError"
	I0916 10:38:22.050506       1 handler.go:286] Adding GroupVersion metrics.k8s.io v1beta1 to ResourceManager
	E0916 10:38:22.062342       1 remote_available_controller.go:448] "Unhandled Error" err="v1beta1.metrics.k8s.io failed with: Operation cannot be fulfilled on apiservices.apiregistration.k8s.io \"v1beta1.metrics.k8s.io\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
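
Note: the 503s and failed dials to 10.100.138.161:443 above mean the aggregated v1beta1.metrics.k8s.io APIService had no healthy backend for a stretch around 10:37-10:38, which lines up with the TestAddons/parallel/MetricsServer failure in the summary. Once a kubectl that matches the host architecture is available, the availability condition can be read directly (a sketch):

  # the AVAILABLE column should show True; False here would match the errors above
  kubectl --context addons-936355 get apiservice v1beta1.metrics.k8s.io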
	
	
	==> kube-controller-manager [4ee66eef50ab615bdd0d94fe194567492cafe76910819703a964b78b45f55436] <==
	I0916 10:37:35.666189       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/snapshot-controller-56fcc65765" duration="55.9µs"
	I0916 10:37:40.457925       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="58.632µs"
	I0916 10:37:42.479801       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.509000       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:43.572015       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.515757       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.524004       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:44.530235       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:37:45.150271       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:52.558234       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="22.364743ms"
	I0916 10:37:52.559132       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="31.351µs"
	I0916 10:37:54.619407       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:37:54.659736       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="16.445353ms"
	I0916 10:37:54.660858       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-bc57996ff" duration="68.076µs"
	E0916 10:37:56.423390       1 resource_quota_controller.go:446] "Unhandled Error" err="unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: stale GroupVersion discovery: metrics.k8s.io/v1beta1" logger="UnhandledError"
	I0916 10:37:56.899634       1 garbagecollector.go:826] "failed to discover some groups" logger="garbage-collector-controller" groups="<internal error: json: unsupported type: map[schema.GroupVersion]error>"
	I0916 10:37:58.024462       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:37:58.060468       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:38:14.019749       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:14.050322       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:38:16.992451       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="19.424064ms"
	I0916 10:38:16.993500       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="50.764µs"
	I0916 10:38:25.225399       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	I0916 10:39:05.149618       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.223µs"
	I0916 10:43:30.185523       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-936355"
	
	
	==> kube-proxy [6200eb5cfcd24bb0f0253359201c6d75c0624dcb7a313b0bc95b7370a13539a0] <==
	I0916 10:36:01.688812       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:36:02.265241       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:36:02.271591       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:36:02.423456       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:36:02.423579       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:36:02.431736       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:36:02.432160       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:36:02.432351       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:36:02.433544       1 config.go:199] "Starting service config controller"
	I0916 10:36:02.433620       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:36:02.433682       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:36:02.433713       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:36:02.434194       1 config.go:328] "Starting node config controller"
	I0916 10:36:02.434243       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:36:02.545223       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:36:02.585616       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:36:02.585634       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [2b161087caf5a6ab9dedbb699f7c69ddf6c2c5cdb19026d46daf824d90966d25] <==
	W0916 10:35:50.291598       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:35:50.291652       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291738       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291810       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.291911       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.291966       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292090       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:35:50.292141       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292276       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292635       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292342       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:35:50.292669       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292396       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:35:50.292714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292436       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:35:50.292743       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292494       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:35:50.292771       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292533       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:35:50.292790       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:35:50.292814       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:35:50.292916       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:35:50.292984       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	I0916 10:35:51.479680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:43:52 addons-936355 kubelet[1507]: E0916 10:43:52.195284    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483432195045058,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:43:52 addons-936355 kubelet[1507]: E0916 10:43:52.195323    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483432195045058,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:43:52 addons-936355 kubelet[1507]: I0916 10:43:52.950679    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:43:52 addons-936355 kubelet[1507]: E0916 10:43:52.950881    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:44:02 addons-936355 kubelet[1507]: E0916 10:44:02.197581    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483442197339100,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:02 addons-936355 kubelet[1507]: E0916 10:44:02.197621    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483442197339100,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:07 addons-936355 kubelet[1507]: I0916 10:44:07.951452    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:44:07 addons-936355 kubelet[1507]: E0916 10:44:07.951634    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:44:12 addons-936355 kubelet[1507]: E0916 10:44:12.199962    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483452199723093,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:12 addons-936355 kubelet[1507]: E0916 10:44:12.200000    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483452199723093,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:19 addons-936355 kubelet[1507]: I0916 10:44:19.950840    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:44:19 addons-936355 kubelet[1507]: E0916 10:44:19.951035    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:44:22 addons-936355 kubelet[1507]: E0916 10:44:22.202619    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483462202414340,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:22 addons-936355 kubelet[1507]: E0916 10:44:22.202653    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483462202414340,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:32 addons-936355 kubelet[1507]: E0916 10:44:32.205197    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483472204960272,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:32 addons-936355 kubelet[1507]: E0916 10:44:32.205239    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483472204960272,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:33 addons-936355 kubelet[1507]: I0916 10:44:33.951259    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:44:33 addons-936355 kubelet[1507]: E0916 10:44:33.951480    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:44:39 addons-936355 kubelet[1507]: I0916 10:44:39.951373    1507 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/nvidia-device-plugin-daemonset-6j9gc" secret="" err="secret \"gcp-auth\" not found"
	Sep 16 10:44:42 addons-936355 kubelet[1507]: E0916 10:44:42.208332    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483482208032844,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:42 addons-936355 kubelet[1507]: E0916 10:44:42.208376    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483482208032844,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:44 addons-936355 kubelet[1507]: I0916 10:44:44.950702    1507 scope.go:117] "RemoveContainer" containerID="bcf51d70eaf49387d9ea6641126c0b61aca168a47c3d61e6314346f6d445ad66"
	Sep 16 10:44:44 addons-936355 kubelet[1507]: E0916 10:44:44.950913    1507 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-hx2qq_gadget(fb6217d4-dbed-40c2-b47e-4342cb3f94b1)\"" pod="gadget/gadget-hx2qq" podUID="fb6217d4-dbed-40c2-b47e-4342cb3f94b1"
	Sep 16 10:44:52 addons-936355 kubelet[1507]: E0916 10:44:52.210673    1507 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483492210368562,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:44:52 addons-936355 kubelet[1507]: E0916 10:44:52.210713    1507 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483492210368562,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:460628,},InodesUsed:&UInt64Value{Value:183,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
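
Note: the repeating eviction-manager "missing image stats" errors are most likely kubelet v1.31 expecting the newer split image-filesystem stats (the empty ContainerFilesystems field above) that cri-o 1.24 does not report; only the overlay-images ImageFilesystems entry comes back. What the runtime actually returns can be checked in place (a sketch, assuming crictl is present in the node image):

  out/minikube-linux-arm64 -p addons-936355 ssh -- sudo crictl imagefsinfo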
	
	
	==> storage-provisioner [2a862ef326432a5d0293f9317e2a22cc3bbc0e787dab4595749d403d11fd2627] <==
	I0916 10:36:43.471506       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:36:43.494873       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:36:43.495065       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:36:43.512818       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:36:43.513129       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
	I0916 10:36:43.520230       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"89d79315-71d3-40c0-aeb5-687aa54390d8", APIVersion:"v1", ResourceVersion:"938", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8 became leader
	I0916 10:36:43.613923       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-936355_2129c620-91c1-42a8-a96f-7ac21cc45cc8!
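
Note: the provisioner takes leadership through a client-go resource lock on the kube-system/k8s.io-minikube-hostpath Endpoints object, which is what the LeaderElection event above records. With a working kubectl, the current holder is visible on that object, typically under its control-plane.alpha.kubernetes.io/leader annotation (a sketch):

  kubectl --context addons-936355 -n kube-system get endpoints k8s.io-minikube-hostpath -o yaml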
	
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-936355 -n addons-936355
helpers_test.go:261: (dbg) Run:  kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (671.892µs)
helpers_test.go:263: kubectl --context addons-936355 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/CSI (362.56s)
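
Note: "fork/exec /usr/local/bin/kubectl: exec format error" is the common root cause running through this report: the kernel refusing to execute a binary built for a different architecture than this arm64 host. A minimal check on the runner (a sketch, assuming the file utility is installed):

  # an x86-64 ELF here, rather than an ARM aarch64 one, would explain every exec format error above
  file /usr/local/bin/kubectl
  uname -m
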
x
+
TestAddons/parallel/LocalPath (0s)
=== RUN   TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath
=== CONT  TestAddons/parallel/LocalPath
addons_test.go:982: (dbg) Run:  kubectl --context addons-936355 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:982: (dbg) Non-zero exit: kubectl --context addons-936355 apply -f testdata/storage-provisioner-rancher/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (351.696µs)
addons_test.go:984: kubectl apply pvc.yaml failed: args "kubectl --context addons-936355 apply -f testdata/storage-provisioner-rancher/pvc.yaml": fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/LocalPath (0.00s)
x
+
TestCertOptions (41.78s)
=== RUN   TestCertOptions
=== PAUSE TestCertOptions
=== CONT  TestCertOptions
cert_options_test.go:49: (dbg) Run:  out/minikube-linux-arm64 start -p cert-options-209735 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=crio
cert_options_test.go:49: (dbg) Done: out/minikube-linux-arm64 start -p cert-options-209735 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=crio: (36.594822363s)
cert_options_test.go:60: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-209735 ssh "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt"
cert_options_test.go:88: (dbg) Run:  kubectl --context cert-options-209735 config view
cert_options_test.go:88: (dbg) Non-zero exit: kubectl --context cert-options-209735 config view: fork/exec /usr/local/bin/kubectl: exec format error (720.737µs)
cert_options_test.go:90: failed to get kubectl config. args "kubectl --context cert-options-209735 config view" : fork/exec /usr/local/bin/kubectl: exec format error
cert_options_test.go:93: Kubeconfig apiserver server port incorrect. Output of 'kubectl config view' = ""
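
Note: since kubectl itself cannot run, the port assertion has nothing to compare; the test falls back below to reading the kubeconfig straight off the node, and that check can be narrowed to just the server line, which should end in :8555 (a sketch, mirroring the ssh invocation below):

  out/minikube-linux-arm64 ssh -p cert-options-209735 -- "sudo grep 'server:' /etc/kubernetes/admin.conf"
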
cert_options_test.go:100: (dbg) Run:  out/minikube-linux-arm64 ssh -p cert-options-209735 -- "sudo cat /etc/kubernetes/admin.conf"
cert_options_test.go:109: *** TestCertOptions FAILED at 2024-09-16 11:28:25.386189645 +0000 UTC m=+3218.120283484
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestCertOptions]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect cert-options-209735
helpers_test.go:235: (dbg) docker inspect cert-options-209735:
-- stdout --
	[
	    {
	        "Id": "575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb",
	        "Created": "2024-09-16T11:27:54.26777413Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1566803,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:27:54.450935808Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb/hostname",
	        "HostsPath": "/var/lib/docker/containers/575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb/hosts",
	        "LogPath": "/var/lib/docker/containers/575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb/575a360b37118a88e17f50f8ae42a0b5d6354b7d7779790091c80529a4cef6cb-json.log",
	        "Name": "/cert-options-209735",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "cert-options-209735:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "cert-options-209735",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8555/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2147483648,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4294967296,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/680b8543546ea21106fc2f1ecaf729f72930e942e6af1585d85734c863f397b7-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/680b8543546ea21106fc2f1ecaf729f72930e942e6af1585d85734c863f397b7/merged",
	                "UpperDir": "/var/lib/docker/overlay2/680b8543546ea21106fc2f1ecaf729f72930e942e6af1585d85734c863f397b7/diff",
	                "WorkDir": "/var/lib/docker/overlay2/680b8543546ea21106fc2f1ecaf729f72930e942e6af1585d85734c863f397b7/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "cert-options-209735",
	                "Source": "/var/lib/docker/volumes/cert-options-209735/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "cert-options-209735",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8555/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "cert-options-209735",
	                "name.minikube.sigs.k8s.io": "cert-options-209735",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "cedc000420f414b6bcb1c59f3fa3bc067f09d0c343f34697fd448f2b8dadea69",
	            "SandboxKey": "/var/run/docker/netns/cedc000420f4",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34873"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34875"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34878"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34876"
	                    }
	                ],
	                "8555/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34877"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "cert-options-209735": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.85.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:55:02",
	                    "DriverOpts": null,
	                    "NetworkID": "a53bc3eba341ac36a24e64a77ef0f18b24ae6c37f408c3a3e2fd492fd3d59919",
	                    "EndpointID": "ba5c9b569751cba3dd45d86c09bc38a9a241d759616d2c9938d4fce271d41f7e",
	                    "Gateway": "192.168.85.1",
	                    "IPAddress": "192.168.85.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "cert-options-209735",
	                        "575a360b3711"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
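The Ports map in the inspect output above is how the harness discovers where each container port landed on the host (22/tcp maps to 127.0.0.1:34873 here); later log lines read it back with a Go template over docker container inspect. As a rough illustration only — the struct below is a hand-rolled subset of the inspect JSON, not minikube's own types — the same lookup in plain Go:

// Sketch: extract the host endpoint for a container port from
// `docker inspect` JSON (docker emits an array, one element per container).
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

type inspectOut struct {
	NetworkSettings struct {
		Ports map[string][]struct {
			HostIp   string // docker spells it "HostIp", as in the dump above
			HostPort string
		}
	}
}

func main() {
	var containers []inspectOut
	if err := json.NewDecoder(os.Stdin).Decode(&containers); err != nil {
		panic(err)
	}
	for _, c := range containers {
		if b := c.NetworkSettings.Ports["22/tcp"]; len(b) > 0 {
			fmt.Printf("ssh endpoint: %s:%s\n", b[0].HostIp, b[0].HostPort)
		}
	}
}

Piping "docker inspect cert-options-209735" into this would print 127.0.0.1:34873, the endpoint the provisioner dials later in the log.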
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p cert-options-209735 -n cert-options-209735
helpers_test.go:244: <<< TestCertOptions FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestCertOptions]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-209735 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p cert-options-209735 logs -n 25: (1.402074913s)
helpers_test.go:252: TestCertOptions logs: 
-- stdout --
	
	==> Audit <==
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	| Command |                         Args                         |          Profile          |  User   | Version |     Start Time      |      End Time       |
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat docker                                 |                           |         |         |                     |                     |
	|         | --no-pager                                           |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/docker/daemon.json                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo docker                         | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | system info                                          |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status cri-docker                          |                           |         |         |                     |                     |
	|         | --all --full --no-pager                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat cri-docker                             |                           |         |         |                     |                     |
	|         | --no-pager                                           |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/systemd/system/cri-docker.service.d/10-cni.conf |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /usr/lib/systemd/system/cri-docker.service           |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | cri-dockerd --version                                |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status containerd                          |                           |         |         |                     |                     |
	|         | --all --full --no-pager                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat containerd                             |                           |         |         |                     |                     |
	|         | --no-pager                                           |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /lib/systemd/system/containerd.service               |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/containerd/config.toml                          |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | containerd config dump                               |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status crio --all                          |                           |         |         |                     |                     |
	|         | --full --no-pager                                    |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat crio --no-pager                        |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo find                           | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/crio -type f -exec sh -c                        |                           |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                 |                           |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo crio                           | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | config                                               |                           |         |         |                     |                     |
	| delete  | -p cilium-141252                                     | cilium-141252             | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:27 UTC |
	| start   | -p force-systemd-env-541584                          | force-systemd-env-541584  | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:27 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --alsologtostderr                                    |                           |         |         |                     |                     |
	|         | -v=5 --driver=docker                                 |                           |         |         |                     |                     |
	|         | --container-runtime=crio                             |                           |         |         |                     |                     |
	| delete  | -p kubernetes-upgrade-485103                         | kubernetes-upgrade-485103 | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:27 UTC |
	| start   | -p cert-expiration-258290                            | cert-expiration-258290    | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:28 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --cert-expiration=3m                                 |                           |         |         |                     |                     |
	|         | --driver=docker                                      |                           |         |         |                     |                     |
	|         | --container-runtime=crio                             |                           |         |         |                     |                     |
	| delete  | -p force-systemd-env-541584                          | force-systemd-env-541584  | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:27 UTC |
	| start   | -p cert-options-209735                               | cert-options-209735       | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:28 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --apiserver-ips=127.0.0.1                            |                           |         |         |                     |                     |
	|         | --apiserver-ips=192.168.15.15                        |                           |         |         |                     |                     |
	|         | --apiserver-names=localhost                          |                           |         |         |                     |                     |
	|         | --apiserver-names=www.google.com                     |                           |         |         |                     |                     |
	|         | --apiserver-port=8555                                |                           |         |         |                     |                     |
	|         | --driver=docker                                      |                           |         |         |                     |                     |
	|         | --container-runtime=crio                             |                           |         |         |                     |                     |
	| ssh     | cert-options-209735 ssh                              | cert-options-209735       | jenkins | v1.34.0 | 16 Sep 24 11:28 UTC | 16 Sep 24 11:28 UTC |
	|         | openssl x509 -text -noout -in                        |                           |         |         |                     |                     |
	|         | /var/lib/minikube/certs/apiserver.crt                |                           |         |         |                     |                     |
	| ssh     | -p cert-options-209735 -- sudo                       | cert-options-209735       | jenkins | v1.34.0 | 16 Sep 24 11:28 UTC | 16 Sep 24 11:28 UTC |
	|         | cat /etc/kubernetes/admin.conf                       |                           |         |         |                     |                     |
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:27:48
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
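Every entry that follows is framed per the format header above (klog/glog style). A throwaway sketch — the regex is mine, not part of the test harness — of splitting such a line into severity, date, time, pid, source location, and message:

// Sketch: parse one klog-framed line, e.g.
// "I0916 11:27:48.187922 1566252 out.go:345] Setting OutFile to fd 1 ..."
package main

import (
	"fmt"
	"regexp"
)

// [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
var klogLine = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([^:]+:\d+)\] (.*)$`)

func main() {
	m := klogLine.FindStringSubmatch(
		"I0916 11:27:48.187922 1566252 out.go:345] Setting OutFile to fd 1 ...")
	if m != nil {
		fmt.Printf("sev=%s date=%s time=%s pid=%s src=%s msg=%q\n",
			m[1], m[2], m[3], m[4], m[5], m[6])
	}
}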
	I0916 11:27:48.187922 1566252 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:27:48.188507 1566252 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:27:48.188512 1566252 out.go:358] Setting ErrFile to fd 2...
	I0916 11:27:48.188516 1566252 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:27:48.188781 1566252 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:27:48.189218 1566252 out.go:352] Setting JSON to false
	I0916 11:27:48.190150 1566252 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":40214,"bootTime":1726445855,"procs":191,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:27:48.190230 1566252 start.go:139] virtualization:  
	I0916 11:27:48.192235 1566252 out.go:177] * [cert-options-209735] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:27:48.193650 1566252 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:27:48.193715 1566252 notify.go:220] Checking for updates...
	I0916 11:27:48.195835 1566252 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:27:48.197080 1566252 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:27:48.198659 1566252 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:27:48.199865 1566252 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:27:48.201121 1566252 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:27:48.202809 1566252 config.go:182] Loaded profile config "cert-expiration-258290": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:27:48.202895 1566252 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:27:48.233783 1566252 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:27:48.233902 1566252 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:27:48.330387 1566252 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:38 OomKillDisable:true NGoroutines:53 SystemTime:2024-09-16 11:27:48.319022439 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:27:48.330488 1566252 docker.go:318] overlay module found
	I0916 11:27:48.331975 1566252 out.go:177] * Using the docker driver based on user configuration
	I0916 11:27:48.333305 1566252 start.go:297] selected driver: docker
	I0916 11:27:48.333314 1566252 start.go:901] validating driver "docker" against <nil>
	I0916 11:27:48.333326 1566252 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:27:48.333972 1566252 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:27:48.449765 1566252 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:38 OomKillDisable:true NGoroutines:53 SystemTime:2024-09-16 11:27:48.440414166 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:27:48.449975 1566252 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:27:48.450187 1566252 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 11:27:48.451411 1566252 out.go:177] * Using Docker driver with root privileges
	I0916 11:27:48.453536 1566252 cni.go:84] Creating CNI manager for ""
	I0916 11:27:48.453613 1566252 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:27:48.453621 1566252 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:27:48.453701 1566252 start.go:340] cluster config:
	{Name:cert-options-209735 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-209735 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:27:48.457423 1566252 out.go:177] * Starting "cert-options-209735" primary control-plane node in "cert-options-209735" cluster
	I0916 11:27:48.458503 1566252 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:27:48.460005 1566252 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:27:48.461216 1566252 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:27:48.461261 1566252 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:27:48.461274 1566252 cache.go:56] Caching tarball of preloaded images
	I0916 11:27:48.461357 1566252 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:27:48.461365 1566252 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:27:48.461483 1566252 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/config.json ...
	I0916 11:27:48.461500 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/config.json: {Name:mk001a8eac91bc44abfdc2dc4dc18e29b141f7a9 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:27:48.461658 1566252 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	W0916 11:27:48.494160 1566252 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:27:48.494171 1566252 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:27:48.494259 1566252 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:27:48.494277 1566252 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:27:48.494280 1566252 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:27:48.494287 1566252 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:27:48.494291 1566252 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:27:48.637914 1566252 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:27:48.637955 1566252 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:27:48.637986 1566252 start.go:360] acquireMachinesLock for cert-options-209735: {Name:mk895c57c86e74855f159a75cac59c9f53d4bdcc Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:27:48.638110 1566252 start.go:364] duration metric: took 107.476µs to acquireMachinesLock for "cert-options-209735"
	I0916 11:27:48.638138 1566252 start.go:93] Provisioning new machine with config: &{Name:cert-options-209735 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-209735 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:27:48.638215 1566252 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:27:45.729450 1563615 out.go:235]   - Generating certificates and keys ...
	I0916 11:27:45.729539 1563615 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:27:45.729603 1563615 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:27:47.351657 1563615 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:27:48.609473 1563615 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:27:48.640809 1566252 out.go:235] * Creating docker container (CPUs=2, Memory=2048MB) ...
	I0916 11:27:48.641109 1566252 start.go:159] libmachine.API.Create for "cert-options-209735" (driver="docker")
	I0916 11:27:48.641138 1566252 client.go:168] LocalClient.Create starting
	I0916 11:27:48.641208 1566252 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:27:48.641243 1566252 main.go:141] libmachine: Decoding PEM data...
	I0916 11:27:48.641256 1566252 main.go:141] libmachine: Parsing certificate...
	I0916 11:27:48.641305 1566252 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:27:48.641321 1566252 main.go:141] libmachine: Decoding PEM data...
	I0916 11:27:48.641334 1566252 main.go:141] libmachine: Parsing certificate...
	I0916 11:27:48.641764 1566252 cli_runner.go:164] Run: docker network inspect cert-options-209735 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:27:48.657423 1566252 cli_runner.go:211] docker network inspect cert-options-209735 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:27:48.657499 1566252 network_create.go:284] running [docker network inspect cert-options-209735] to gather additional debugging logs...
	I0916 11:27:48.657514 1566252 cli_runner.go:164] Run: docker network inspect cert-options-209735
	W0916 11:27:48.670586 1566252 cli_runner.go:211] docker network inspect cert-options-209735 returned with exit code 1
	I0916 11:27:48.670607 1566252 network_create.go:287] error running [docker network inspect cert-options-209735]: docker network inspect cert-options-209735: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network cert-options-209735 not found
	I0916 11:27:48.670618 1566252 network_create.go:289] output of [docker network inspect cert-options-209735]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network cert-options-209735 not found
	
	** /stderr **
	I0916 11:27:48.670717 1566252 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:27:48.687946 1566252 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-a49e1846148d IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:d3:9d:ef:74} reservation:<nil>}
	I0916 11:27:48.688411 1566252 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-2e9863632116 IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:77:c8:06:b6} reservation:<nil>}
	I0916 11:27:48.688942 1566252 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-76703dbf7b5c IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:29:f7:34:a1} reservation:<nil>}
	I0916 11:27:48.689265 1566252 network.go:211] skipping subnet 192.168.76.0/24 that is taken: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName:br-b1b595306fdc IfaceIPv4:192.168.76.1 IfaceMTU:1500 IfaceMAC:02:42:88:38:89:15} reservation:<nil>}
	I0916 11:27:48.689969 1566252 network.go:206] using free private subnet 192.168.85.0/24: &{IP:192.168.85.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.85.0/24 Gateway:192.168.85.1 ClientMin:192.168.85.2 ClientMax:192.168.85.254 Broadcast:192.168.85.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001857e60}
	I0916 11:27:48.690025 1566252 network_create.go:124] attempt to create docker network cert-options-209735 192.168.85.0/24 with gateway 192.168.85.1 and MTU of 1500 ...
	I0916 11:27:48.690110 1566252 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.85.0/24 --gateway=192.168.85.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=cert-options-209735 cert-options-209735
	I0916 11:27:48.772911 1566252 network_create.go:108] docker network cert-options-209735 192.168.85.0/24 created
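The four "skipping subnet ... that is taken" lines above, followed by "using free private subnet 192.168.85.0/24", show the selection walk: candidate /24s advance by 9 in the third octet (49, 58, 67, 76, 85) until one has no existing bridge. A minimal sketch of that walk under those assumptions, with the taken-check stubbed out (the real code probes host interfaces and reservations):

// Sketch: first-free-subnet walk as the log implies; the step of 9 is
// inferred from the observed sequence, and isTaken is a stand-in probe.
package main

import "fmt"

func firstFreeSubnet(isTaken func(cidr string) bool) string {
	for octet := 49; octet <= 247; octet += 9 {
		cidr := fmt.Sprintf("192.168.%d.0/24", octet)
		if !isTaken(cidr) {
			return cidr
		}
	}
	return ""
}

func main() {
	taken := map[string]bool{
		"192.168.49.0/24": true, "192.168.58.0/24": true,
		"192.168.67.0/24": true, "192.168.76.0/24": true,
	}
	// Prints 192.168.85.0/24, matching the network the log creates.
	fmt.Println(firstFreeSubnet(func(c string) bool { return taken[c] }))
}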
	I0916 11:27:48.772935 1566252 kic.go:121] calculated static IP "192.168.85.2" for the "cert-options-209735" container
	I0916 11:27:48.773021 1566252 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:27:48.796755 1566252 cli_runner.go:164] Run: docker volume create cert-options-209735 --label name.minikube.sigs.k8s.io=cert-options-209735 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:27:48.848812 1566252 oci.go:103] Successfully created a docker volume cert-options-209735
	I0916 11:27:48.848903 1566252 cli_runner.go:164] Run: docker run --rm --name cert-options-209735-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=cert-options-209735 --entrypoint /usr/bin/test -v cert-options-209735:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:27:49.559573 1566252 oci.go:107] Successfully prepared a docker volume cert-options-209735
	I0916 11:27:49.559615 1566252 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:27:49.559633 1566252 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:27:49.559714 1566252 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v cert-options-209735:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:27:49.694057 1563615 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:27:49.904137 1563615 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:27:50.341919 1563615 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:27:50.342202 1563615 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [cert-expiration-258290 localhost] and IPs [192.168.76.2 127.0.0.1 ::1]
	I0916 11:27:50.911530 1563615 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:27:50.911766 1563615 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [cert-expiration-258290 localhost] and IPs [192.168.76.2 127.0.0.1 ::1]
	I0916 11:27:51.639882 1563615 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:27:52.109492 1563615 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:27:52.705032 1563615 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:27:52.705197 1563615 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:27:53.334653 1563615 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:27:53.543093 1563615 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:27:53.914872 1563615 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:27:54.572349 1563615 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:27:55.873094 1563615 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:27:55.873929 1563615 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:27:55.885111 1563615 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:27:54.173475 1566252 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v cert-options-209735:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.613727482s)
	I0916 11:27:54.173496 1566252 kic.go:203] duration metric: took 4.613859558s to extract preloaded images to volume ...
	W0916 11:27:54.173633 1566252 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:27:54.173744 1566252 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:27:54.253714 1566252 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname cert-options-209735 --name cert-options-209735 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=cert-options-209735 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=cert-options-209735 --network cert-options-209735 --ip 192.168.85.2 --volume cert-options-209735:/var --security-opt apparmor=unconfined --memory=2048mb --cpus=2 -e container=docker --expose 8555 --publish=127.0.0.1::8555 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:27:54.639787 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Running}}
	I0916 11:27:54.657455 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:27:54.684945 1566252 cli_runner.go:164] Run: docker exec cert-options-209735 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:27:54.766115 1566252 oci.go:144] the created container "cert-options-209735" has a running status.
	I0916 11:27:54.766135 1566252 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa...
	I0916 11:27:55.323594 1566252 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:27:55.382382 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:27:55.412869 1566252 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:27:55.412884 1566252 kic_runner.go:114] Args: [docker exec --privileged cert-options-209735 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:27:55.533872 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:27:55.572536 1566252 machine.go:93] provisionDockerMachine start ...
	I0916 11:27:55.572633 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:27:55.599950 1566252 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:55.600261 1566252 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34873 <nil> <nil>}
	I0916 11:27:55.600269 1566252 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:27:55.600938 1566252 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:56050->127.0.0.1:34873: read: connection reset by peer
	I0916 11:27:55.888986 1563615 out.go:235]   - Booting up control plane ...
	I0916 11:27:55.889100 1563615 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:27:55.889176 1563615 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:27:55.891371 1563615 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:27:55.906156 1563615 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:27:55.914096 1563615 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:27:55.914145 1563615 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:27:56.093721 1563615 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:27:56.093834 1563615 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:27:57.094585 1563615 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.000845891s
	I0916 11:27:57.094675 1563615 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
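The kubelet-check and api-check lines above describe polling a health endpoint until it answers or a 4m0s budget lapses. A hedged sketch of that kind of wait loop — the URL and timeout come from the log; the retry cadence is an assumption:

// Sketch: poll http://127.0.0.1:10248/healthz until 200 OK or deadline.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func waitHealthy(url string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		resp, err := http.Get(url)
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				return nil // healthy, as in "kubelet is healthy after 1.000845891s"
			}
		}
		time.Sleep(500 * time.Millisecond) // assumed cadence
	}
	return fmt.Errorf("%s not healthy within %s", url, timeout)
}

func main() {
	fmt.Println(waitHealthy("http://127.0.0.1:10248/healthz", 4*time.Minute))
}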
	I0916 11:27:58.780191 1566252 main.go:141] libmachine: SSH cmd err, output: <nil>: cert-options-209735
	
	I0916 11:27:58.780205 1566252 ubuntu.go:169] provisioning hostname "cert-options-209735"
	I0916 11:27:58.780270 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:27:58.802997 1566252 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:58.803225 1566252 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34873 <nil> <nil>}
	I0916 11:27:58.803234 1566252 main.go:141] libmachine: About to run SSH command:
	sudo hostname cert-options-209735 && echo "cert-options-209735" | sudo tee /etc/hostname
	I0916 11:27:58.993052 1566252 main.go:141] libmachine: SSH cmd err, output: <nil>: cert-options-209735
	
	I0916 11:27:58.993130 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:27:59.040913 1566252 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:59.041148 1566252 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34873 <nil> <nil>}
	I0916 11:27:59.041163 1566252 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\scert-options-209735' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 cert-options-209735/g' /etc/hosts;
				else 
					echo '127.0.1.1 cert-options-209735' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:27:59.201061 1566252 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:27:59.201078 1566252 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:27:59.201099 1566252 ubuntu.go:177] setting up certificates
	I0916 11:27:59.201131 1566252 provision.go:84] configureAuth start
	I0916 11:27:59.201202 1566252 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-209735
	I0916 11:27:59.230054 1566252 provision.go:143] copyHostCerts
	I0916 11:27:59.230125 1566252 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:27:59.230133 1566252 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:27:59.230220 1566252 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:27:59.230308 1566252 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:27:59.230312 1566252 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:27:59.230337 1566252 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:27:59.230385 1566252 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:27:59.230388 1566252 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:27:59.230409 1566252 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:27:59.230454 1566252 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.cert-options-209735 san=[127.0.0.1 192.168.85.2 cert-options-209735 localhost minikube]
	I0916 11:27:59.506058 1566252 provision.go:177] copyRemoteCerts
	I0916 11:27:59.506112 1566252 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:27:59.506163 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:27:59.523276 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:27:59.626225 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:27:59.662794 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1224 bytes)
	I0916 11:27:59.704509 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:27:59.742052 1566252 provision.go:87] duration metric: took 540.907661ms to configureAuth
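configureAuth's "generating server cert" line above lists the SAN set baked into the machine's TLS server certificate: 127.0.0.1, 192.168.85.2, cert-options-209735, localhost, minikube. A self-contained sketch of issuing a certificate with that mixed DNS/IP SAN set — self-signed here for brevity, whereas minikube signs with the CA under .minikube/certs:

// Sketch, not minikube's code: a server cert whose SANs mirror the
// san=[...] list in the provision log above.
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"net"
	"time"
)

func main() {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.cert-options-209735"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(24 * time.Hour),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		// SAN entries taken verbatim from the log line above.
		DNSNames:    []string{"cert-options-209735", "localhost", "minikube"},
		IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.85.2")},
	}
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	cert, err := x509.ParseCertificate(der)
	if err != nil {
		panic(err)
	}
	fmt.Println("DNS SANs:", cert.DNSNames, "IP SANs:", cert.IPAddresses)
}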
	I0916 11:27:59.742068 1566252 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:27:59.742280 1566252 config.go:182] Loaded profile config "cert-options-209735": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:27:59.742402 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:27:59.776437 1566252 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:59.776759 1566252 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34873 <nil> <nil>}
	I0916 11:27:59.776774 1566252 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:28:00.162414 1566252 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:28:00.162431 1566252 machine.go:96] duration metric: took 4.589877463s to provisionDockerMachine
	I0916 11:28:00.162440 1566252 client.go:171] duration metric: took 11.521297373s to LocalClient.Create
	I0916 11:28:00.162458 1566252 start.go:167] duration metric: took 11.521348522s to libmachine.API.Create "cert-options-209735"
	I0916 11:28:00.162464 1566252 start.go:293] postStartSetup for "cert-options-209735" (driver="docker")
	I0916 11:28:00.162492 1566252 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:28:00.162570 1566252 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:28:00.162621 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:00.190525 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:00.313082 1566252 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:28:00.322076 1566252 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:28:00.322108 1566252 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:28:00.322118 1566252 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:28:00.322126 1566252 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:28:00.322151 1566252 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:28:00.322226 1566252 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:28:00.322331 1566252 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:28:00.322461 1566252 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:28:00.337734 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:28:00.394406 1566252 start.go:296] duration metric: took 231.925231ms for postStartSetup
	I0916 11:28:00.394830 1566252 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-209735
	I0916 11:28:00.426725 1566252 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/config.json ...
	I0916 11:28:00.427051 1566252 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:28:00.427100 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:00.466703 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:00.578904 1566252 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:28:00.589439 1566252 start.go:128] duration metric: took 11.951208067s to createHost
	I0916 11:28:00.589454 1566252 start.go:83] releasing machines lock for "cert-options-209735", held for 11.951337549s
	I0916 11:28:00.589543 1566252 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-209735
	I0916 11:28:00.618400 1566252 ssh_runner.go:195] Run: cat /version.json
	I0916 11:28:00.618447 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:00.618686 1566252 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:28:00.618750 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:00.648767 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:00.667324 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:00.772392 1566252 ssh_runner.go:195] Run: systemctl --version
	I0916 11:28:00.922390 1566252 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:28:01.112282 1566252 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:28:01.117276 1566252 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:28:01.144273 1566252 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:28:01.144361 1566252 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:28:01.190526 1566252 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:28:01.190540 1566252 start.go:495] detecting cgroup driver to use...
	I0916 11:28:01.190573 1566252 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:28:01.190630 1566252 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:28:01.220755 1566252 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:28:01.238987 1566252 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:28:01.239047 1566252 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:28:01.255953 1566252 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:28:01.272905 1566252 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:28:01.409494 1566252 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:28:01.566198 1566252 docker.go:233] disabling docker service ...
	I0916 11:28:01.566264 1566252 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:28:01.600379 1566252 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:28:01.618294 1566252 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:28:01.757461 1566252 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:28:01.937648 1566252 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:28:01.956717 1566252 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:28:01.977238 1566252 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:28:01.977312 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:01.990166 1566252 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:28:01.990258 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.010447 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.025263 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.041458 1566252 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:28:02.056003 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.075441 1566252 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.107957 1566252 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:28:02.122068 1566252 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:28:02.144426 1566252 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:28:02.157612 1566252 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:28:02.315294 1566252 ssh_runner.go:195] Run: sudo systemctl restart crio
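[editor's note] The sed edits above rewrite /etc/crio/crio.conf.d/02-crio.conf (pause image, cgroupfs manager, conmon cgroup, unprivileged-port sysctl) before the daemon-reload and crio restart. A sketch spot-checking the rewritten drop-in, with the values this run should have produced shown as comments:

    sudo grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' \
        /etc/crio/crio.conf.d/02-crio.conf
    #   pause_image = "registry.k8s.io/pause:3.10"
    #   cgroup_manager = "cgroupfs"
    #   conmon_cgroup = "pod"
    #   "net.ipv4.ip_unprivileged_port_start=0",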
	I0916 11:28:02.541829 1566252 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:28:02.541913 1566252 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:28:02.548030 1566252 start.go:563] Will wait 60s for crictl version
	I0916 11:28:02.548119 1566252 ssh_runner.go:195] Run: which crictl
	I0916 11:28:02.553470 1566252 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:28:02.639548 1566252 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:28:02.639639 1566252 ssh_runner.go:195] Run: crio --version
	I0916 11:28:02.729924 1566252 ssh_runner.go:195] Run: crio --version
	I0916 11:28:02.830531 1566252 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:28:02.833470 1566252 cli_runner.go:164] Run: docker network inspect cert-options-209735 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:28:02.867018 1566252 ssh_runner.go:195] Run: grep 192.168.85.1	host.minikube.internal$ /etc/hosts
	I0916 11:28:02.870859 1566252 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.85.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:28:02.887565 1566252 kubeadm.go:883] updating cluster {Name:cert-options-209735 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-209735 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:28:02.887679 1566252 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:28:02.887738 1566252 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:28:03.003545 1566252 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:28:03.003558 1566252 crio.go:433] Images already preloaded, skipping extraction
	I0916 11:28:03.003630 1566252 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:28:03.053905 1566252 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:28:03.053918 1566252 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:28:03.053924 1566252 kubeadm.go:934] updating node { 192.168.85.2 8555 v1.31.1 crio true true} ...
	I0916 11:28:03.054018 1566252 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=cert-options-209735 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.85.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:cert-options-209735 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
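[editor's note] The kubelet unit fragment above is rendered into a systemd drop-in (scp'd to /etc/systemd/system/kubelet.service.d/10-kubeadm.conf a few lines below). A sketch for inspecting the effective unit on the node:

    # Show the merged kubelet unit, including the 10-kubeadm.conf drop-in.
    systemctl cat kubelet
    systemctl show kubelet -p ExecStart --no-pager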
	I0916 11:28:03.054113 1566252 ssh_runner.go:195] Run: crio config
	I0916 11:28:03.124813 1566252 cni.go:84] Creating CNI manager for ""
	I0916 11:28:03.124825 1566252 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:28:03.124833 1566252 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:28:03.124864 1566252 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.85.2 APIServerPort:8555 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:cert-options-209735 NodeName:cert-options-209735 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.85.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.85.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:28:03.125016 1566252 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.85.2
	  bindPort: 8555
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "cert-options-209735"
	  kubeletExtraArgs:
	    node-ip: 192.168.85.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.85.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8555
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:28:03.125094 1566252 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:28:03.134596 1566252 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:28:03.134670 1566252 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:28:03.143701 1566252 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (369 bytes)
	I0916 11:28:03.163357 1566252 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:28:03.183125 1566252 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2157 bytes)
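[editor's note] The three kubeadm documents above (InitConfiguration, ClusterConfiguration, KubeletConfiguration plus KubeProxyConfiguration) are copied to /var/tmp/minikube/kubeadm.yaml.new. A hedged sketch for validating them with the same pinned binary that will consume them; this assumes `kubeadm config validate` is present in v1.31.1 (it is in recent kubeadm releases):

    sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config validate \
        --config /var/tmp/minikube/kubeadm.yaml.new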
	I0916 11:28:05.098243 1563615 kubeadm.go:310] [api-check] The API server is healthy after 8.001531594s
	I0916 11:28:05.128060 1563615 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:28:05.157084 1563615 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:28:05.217326 1563615 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:28:05.217558 1563615 kubeadm.go:310] [mark-control-plane] Marking the node cert-expiration-258290 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:28:05.243360 1563615 kubeadm.go:310] [bootstrap-token] Using token: i7h8q4.931cjkcfpd03p040
	I0916 11:28:03.203016 1566252 ssh_runner.go:195] Run: grep 192.168.85.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:28:03.207021 1566252 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.85.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:28:03.217712 1566252 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:28:03.338791 1566252 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:28:03.355533 1566252 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735 for IP: 192.168.85.2
	I0916 11:28:03.355545 1566252 certs.go:194] generating shared ca certs ...
	I0916 11:28:03.355559 1566252 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:03.355712 1566252 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:28:03.355751 1566252 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:28:03.355757 1566252 certs.go:256] generating profile certs ...
	I0916 11:28:03.355819 1566252 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.key
	I0916 11:28:03.355830 1566252 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.crt with IP's: []
	I0916 11:28:03.679401 1566252 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.crt ...
	I0916 11:28:03.679418 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.crt: {Name:mk60af39f31c14d7f11014346cc7790c98c610a2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:03.679639 1566252 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.key ...
	I0916 11:28:03.679652 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/client.key: {Name:mk9d98a056043f1fd3c9a24e81363006b3a1e458 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:03.679754 1566252 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key.f650e870
	I0916 11:28:03.679768 1566252 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt.f650e870 with IP's: [127.0.0.1 192.168.15.15 10.96.0.1 127.0.0.1 10.0.0.1 192.168.85.2]
	I0916 11:28:04.340548 1566252 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt.f650e870 ...
	I0916 11:28:04.340564 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt.f650e870: {Name:mk721b2d7a8b51d83da94d4ad890310f8b86f4dd Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:04.340762 1566252 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key.f650e870 ...
	I0916 11:28:04.340771 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key.f650e870: {Name:mkf06edd982a118fe36370c85a5bc97d25fe7e48 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:04.340850 1566252 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt.f650e870 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt
	I0916 11:28:04.340931 1566252 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key.f650e870 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key
	I0916 11:28:04.340984 1566252 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.key
	I0916 11:28:04.340998 1566252 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.crt with IP's: []
	I0916 11:28:04.726268 1566252 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.crt ...
	I0916 11:28:04.726288 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.crt: {Name:mk7a3c810ea90f4e9d751f30755a983a7f57a84f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:04.726490 1566252 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.key ...
	I0916 11:28:04.726499 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.key: {Name:mk61c9dffedbf146c5b57707fef45e7d5300b74c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:04.726777 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:28:04.726823 1566252 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:28:04.726831 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:28:04.726877 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:28:04.726910 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:28:04.726933 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:28:04.726994 1566252 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:28:04.727777 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:28:04.760272 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:28:04.790604 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:28:04.826140 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:28:04.860054 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1480 bytes)
	I0916 11:28:04.892578 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 11:28:04.925717 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:28:05.017655 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/cert-options-209735/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 11:28:05.048162 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:28:05.079230 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:28:05.118158 1566252 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:28:05.150190 1566252 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:28:05.173942 1566252 ssh_runner.go:195] Run: openssl version
	I0916 11:28:05.180519 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:28:05.190973 1566252 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:28:05.195126 1566252 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:28:05.195187 1566252 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:28:05.202617 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:28:05.212854 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:28:05.226523 1566252 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:28:05.230684 1566252 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:28:05.230769 1566252 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:28:05.240630 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:28:05.255074 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:28:05.265052 1566252 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:28:05.270539 1566252 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:28:05.270601 1566252 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:28:05.278335 1566252 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
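[editor's note] The openssl/ln sequence above installs each CA under its OpenSSL subject-hash name so that TLS verification can find it in /etc/ssl/certs. How the symlink names are derived, as a sketch:

    # The subject hash names the symlink; b5213941 matches the b5213941.0 link above.
    openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem   # prints b5213941
    ls -l /etc/ssl/certs/b5213941.0   # -> /etc/ssl/certs/minikubeCA.pem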
	I0916 11:28:05.288554 1566252 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:28:05.292669 1566252 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:28:05.292732 1566252 kubeadm.go:392] StartCluster: {Name:cert-options-209735 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-209735 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:28:05.292815 1566252 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:28:05.292872 1566252 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:28:05.338320 1566252 cri.go:89] found id: ""
	I0916 11:28:05.338386 1566252 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:28:05.350246 1566252 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:28:05.360618 1566252 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:28:05.360705 1566252 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:28:05.374735 1566252 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:28:05.374744 1566252 kubeadm.go:157] found existing configuration files:
	
	I0916 11:28:05.374797 1566252 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/admin.conf
	I0916 11:28:05.385038 1566252 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:28:05.385109 1566252 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:28:05.394905 1566252 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/kubelet.conf
	I0916 11:28:05.405746 1566252 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:28:05.405837 1566252 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:28:05.417144 1566252 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/controller-manager.conf
	I0916 11:28:05.433206 1566252 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:28:05.433265 1566252 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:28:05.443814 1566252 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/scheduler.conf
	I0916 11:28:05.453912 1566252 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:28:05.453988 1566252 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
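[editor's note] The four grep/rm pairs above are minikube's stale-kubeconfig cleanup: any kubeconfig under /etc/kubernetes that does not reference the expected control-plane endpoint is removed before kubeadm init. A hypothetical condensed form of the same pattern:

    for f in admin kubelet controller-manager scheduler; do
        sudo grep -q 'https://control-plane.minikube.internal:8555' \
            "/etc/kubernetes/$f.conf" || sudo rm -f "/etc/kubernetes/$f.conf"
    done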
	I0916 11:28:05.463432 1566252 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:28:05.519684 1566252 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:28:05.526814 1566252 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:28:05.547654 1566252 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:28:05.547747 1566252 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:28:05.547797 1566252 kubeadm.go:310] OS: Linux
	I0916 11:28:05.547850 1566252 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:28:05.547903 1566252 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:28:05.547959 1566252 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:28:05.548012 1566252 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:28:05.548059 1566252 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:28:05.548142 1566252 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:28:05.548207 1566252 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:28:05.548268 1566252 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:28:05.548321 1566252 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:28:05.664770 1566252 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:28:05.664879 1566252 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:28:05.664976 1566252 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:28:05.694912 1566252 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:28:05.246066 1563615 out.go:235]   - Configuring RBAC rules ...
	I0916 11:28:05.246187 1563615 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:28:05.252265 1563615 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:28:05.267643 1563615 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:28:05.283186 1563615 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:28:05.306672 1563615 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:28:05.319211 1563615 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:28:05.506696 1563615 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:28:06.210651 1563615 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:28:06.502619 1563615 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:28:06.503514 1563615 kubeadm.go:310] 
	I0916 11:28:06.503618 1563615 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:28:06.503627 1563615 kubeadm.go:310] 
	I0916 11:28:06.503704 1563615 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:28:06.503709 1563615 kubeadm.go:310] 
	I0916 11:28:06.503738 1563615 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:28:06.503809 1563615 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:28:06.503859 1563615 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:28:06.503863 1563615 kubeadm.go:310] 
	I0916 11:28:06.503917 1563615 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:28:06.503920 1563615 kubeadm.go:310] 
	I0916 11:28:06.503967 1563615 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:28:06.503973 1563615 kubeadm.go:310] 
	I0916 11:28:06.504030 1563615 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:28:06.504104 1563615 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:28:06.504227 1563615 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:28:06.504238 1563615 kubeadm.go:310] 
	I0916 11:28:06.504343 1563615 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:28:06.504438 1563615 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:28:06.504441 1563615 kubeadm.go:310] 
	I0916 11:28:06.504558 1563615 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token i7h8q4.931cjkcfpd03p040 \
	I0916 11:28:06.504751 1563615 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:28:06.504779 1563615 kubeadm.go:310] 	--control-plane 
	I0916 11:28:06.504785 1563615 kubeadm.go:310] 
	I0916 11:28:06.504897 1563615 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:28:06.504905 1563615 kubeadm.go:310] 
	I0916 11:28:06.505025 1563615 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token i7h8q4.931cjkcfpd03p040 \
	I0916 11:28:06.505146 1563615 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:28:06.511816 1563615 kubeadm.go:310] W0916 11:27:45.537299    1203 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:28:06.512297 1563615 kubeadm.go:310] W0916 11:27:45.538554    1203 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:28:06.512587 1563615 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:28:06.512763 1563615 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
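[editor's note] The join commands printed above embed a --discovery-token-ca-cert-hash. It can be recomputed on the control-plane node from the cluster CA (the standard pipeline from the Kubernetes docs; assumes an RSA CA key, which kubeadm generates by default, and minikube's certificatesDir of /var/lib/minikube/certs):

    openssl x509 -pubkey -in /var/lib/minikube/certs/ca.crt \
        | openssl rsa -pubin -outform der 2>/dev/null \
        | openssl dgst -sha256 -hex | sed 's/^.* //'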
	I0916 11:28:06.512797 1563615 cni.go:84] Creating CNI manager for ""
	I0916 11:28:06.512810 1563615 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:28:06.517485 1563615 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:28:06.519457 1563615 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:28:06.525489 1563615 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:28:06.525500 1563615 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:28:06.549964 1563615 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:28:06.959847 1563615 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:28:06.959970 1563615 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:28:06.960037 1563615 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes cert-expiration-258290 minikube.k8s.io/updated_at=2024_09_16T11_28_06_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=cert-expiration-258290 minikube.k8s.io/primary=true
	I0916 11:28:07.289025 1563615 ops.go:34] apiserver oom_adj: -16
	I0916 11:28:07.289047 1563615 kubeadm.go:1113] duration metric: took 329.134576ms to wait for elevateKubeSystemPrivileges
	I0916 11:28:07.289058 1563615 kubeadm.go:394] duration metric: took 22.020745226s to StartCluster
	I0916 11:28:07.289074 1563615 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:07.289142 1563615 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:28:07.289812 1563615 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:07.290009 1563615 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:28:07.290125 1563615 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:28:07.290367 1563615 config.go:182] Loaded profile config "cert-expiration-258290": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:28:07.290405 1563615 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:28:07.290513 1563615 addons.go:69] Setting storage-provisioner=true in profile "cert-expiration-258290"
	I0916 11:28:07.290527 1563615 addons.go:234] Setting addon storage-provisioner=true in "cert-expiration-258290"
	I0916 11:28:07.290534 1563615 addons.go:69] Setting default-storageclass=true in profile "cert-expiration-258290"
	I0916 11:28:07.290548 1563615 host.go:66] Checking if "cert-expiration-258290" exists ...
	I0916 11:28:07.290549 1563615 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "cert-expiration-258290"
	I0916 11:28:07.290900 1563615 cli_runner.go:164] Run: docker container inspect cert-expiration-258290 --format={{.State.Status}}
	I0916 11:28:07.291043 1563615 cli_runner.go:164] Run: docker container inspect cert-expiration-258290 --format={{.State.Status}}
	I0916 11:28:07.294013 1563615 out.go:177] * Verifying Kubernetes components...
	I0916 11:28:07.301197 1563615 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:28:07.337801 1563615 addons.go:234] Setting addon default-storageclass=true in "cert-expiration-258290"
	I0916 11:28:07.337833 1563615 host.go:66] Checking if "cert-expiration-258290" exists ...
	I0916 11:28:07.338260 1563615 cli_runner.go:164] Run: docker container inspect cert-expiration-258290 --format={{.State.Status}}
	I0916 11:28:07.341002 1563615 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:28:05.699710 1566252 out.go:235]   - Generating certificates and keys ...
	I0916 11:28:05.699840 1566252 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:28:05.699918 1566252 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:28:06.161237 1566252 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:28:06.527807 1566252 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:28:06.944298 1566252 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:28:07.782384 1566252 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:28:07.343600 1563615 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:28:07.343612 1563615 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:28:07.343682 1563615 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-expiration-258290
	I0916 11:28:07.376402 1563615 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:28:07.376416 1563615 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:28:07.376480 1563615 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-expiration-258290
	I0916 11:28:07.399586 1563615 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34868 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-expiration-258290/id_rsa Username:docker}
	I0916 11:28:07.418254 1563615 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34868 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-expiration-258290/id_rsa Username:docker}
	I0916 11:28:07.688831 1563615 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:28:07.703191 1563615 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:28:07.858191 1563615 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.76.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
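[editor's note] The sed pipeline above splices a hosts{} block into the CoreDNS Corefile so host.minikube.internal resolves to the docker network gateway (192.168.76.1 in this run). A sketch for checking the patched ConfigMap with the node's own kubectl, since the host binary is broken in this report:

    sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
        -n kube-system get configmap coredns -o jsonpath='{.data.Corefile}' | grep -A 3 'hosts'
    #   hosts {
    #      192.168.76.1 host.minikube.internal
    #      fallthrough
    #   }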
	I0916 11:28:07.858339 1563615 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:28:08.794778 1563615 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (1.105919861s)
	I0916 11:28:08.794822 1563615 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (1.091620631s)
	I0916 11:28:08.796645 1563615 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:28:08.796731 1563615 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:28:08.797400 1563615 start.go:971] {"host.minikube.internal": 192.168.76.1} host record injected into CoreDNS's ConfigMap
	I0916 11:28:08.821315 1563615 api_server.go:72] duration metric: took 1.53127962s to wait for apiserver process to appear ...
	I0916 11:28:08.821330 1563615 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:28:08.821353 1563615 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:28:08.879143 1563615 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0916 11:28:08.880739 1563615 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:28:08.883000 1563615 addons.go:510] duration metric: took 1.592592899s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:28:08.886510 1563615 api_server.go:141] control plane version: v1.31.1
	I0916 11:28:08.886527 1563615 api_server.go:131] duration metric: took 65.191813ms to wait for apiserver health ...
	I0916 11:28:08.886535 1563615 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:28:08.902934 1563615 system_pods.go:59] 5 kube-system pods found
	I0916 11:28:08.902957 1563615 system_pods.go:61] "etcd-cert-expiration-258290" [d66b83f8-34b4-4412-b4f8-c2a2c28137f7] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:28:08.902965 1563615 system_pods.go:61] "kube-apiserver-cert-expiration-258290" [8970a228-4b3b-4c4d-99b8-38e2ce1dded0] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:28:08.902972 1563615 system_pods.go:61] "kube-controller-manager-cert-expiration-258290" [f61f6d0e-1efa-4127-9856-c0b771b6b305] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:28:08.902978 1563615 system_pods.go:61] "kube-scheduler-cert-expiration-258290" [52b264a1-791e-4ae8-924d-fe6ae178aadd] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:28:08.902983 1563615 system_pods.go:61] "storage-provisioner" [e81325e6-ad5b-4846-a4fa-ce681200d82f] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling.)
	I0916 11:28:08.902988 1563615 system_pods.go:74] duration metric: took 16.448772ms to wait for pod list to return data ...
	I0916 11:28:08.902998 1563615 kubeadm.go:582] duration metric: took 1.612969377s to wait for: map[apiserver:true system_pods:true]
	I0916 11:28:08.903009 1563615 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:28:08.914859 1563615 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:28:08.914883 1563615 node_conditions.go:123] node cpu capacity is 2
	I0916 11:28:08.914895 1563615 node_conditions.go:105] duration metric: took 11.881591ms to run NodePressure ...
	I0916 11:28:08.914904 1563615 start.go:241] waiting for startup goroutines ...
	I0916 11:28:09.303116 1563615 kapi.go:214] "coredns" deployment in "kube-system" namespace and "cert-expiration-258290" context rescaled to 1 replicas
	I0916 11:28:09.303158 1563615 start.go:246] waiting for cluster config update ...
	I0916 11:28:09.303169 1563615 start.go:255] writing updated cluster config ...
	I0916 11:28:09.303506 1563615 ssh_runner.go:195] Run: rm -f paused
	I0916 11:28:09.311822 1563615 out.go:177] * Done! kubectl is now configured to use "cert-expiration-258290" cluster and "default" namespace by default
	E0916 11:28:09.315176 1563615 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
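[editor's note] This is the same "exec format error" that fails many tests in this report: the host's /usr/local/bin/kubectl cannot be executed, which typically indicates an architecture mismatch (e.g. an x86-64 binary on this arm64 host). A quick diagnostic sketch:

    # Compare binary and host architectures.
    file /usr/local/bin/kubectl   # "ELF 64-bit LSB executable, x86-64" would mismatch
    uname -m                      # this arm64 host reports aarch64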
	I0916 11:28:09.024090 1566252 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:28:09.024643 1566252 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [cert-options-209735 localhost] and IPs [192.168.85.2 127.0.0.1 ::1]
	I0916 11:28:09.253302 1566252 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:28:09.253596 1566252 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [cert-options-209735 localhost] and IPs [192.168.85.2 127.0.0.1 ::1]
	I0916 11:28:09.414743 1566252 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:28:09.836355 1566252 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:28:10.406081 1566252 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:28:10.406281 1566252 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:28:10.787934 1566252 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:28:11.411946 1566252 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:28:12.553165 1566252 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:28:12.676601 1566252 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:28:13.141107 1566252 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:28:13.141987 1566252 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:28:13.145175 1566252 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:28:13.148342 1566252 out.go:235]   - Booting up control plane ...
	I0916 11:28:13.148468 1566252 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:28:13.148557 1566252 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:28:13.148629 1566252 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:28:13.158903 1566252 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:28:13.165124 1566252 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:28:13.165348 1566252 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:28:13.283760 1566252 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:28:13.283891 1566252 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:28:14.285204 1566252 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00107122s
	I0916 11:28:14.285284 1566252 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:28:21.291117 1566252 kubeadm.go:310] [api-check] The API server is healthy after 7.006303917s
	I0916 11:28:21.323317 1566252 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:28:21.839612 1566252 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:28:21.869890 1566252 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:28:21.870082 1566252 kubeadm.go:310] [mark-control-plane] Marking the node cert-options-209735 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:28:21.882806 1566252 kubeadm.go:310] [bootstrap-token] Using token: tbf7ky.1tt0e70e2s2z6skw
	I0916 11:28:21.885505 1566252 out.go:235]   - Configuring RBAC rules ...
	I0916 11:28:21.885632 1566252 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:28:21.890366 1566252 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:28:21.899407 1566252 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:28:21.903587 1566252 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:28:21.910211 1566252 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:28:21.918152 1566252 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:28:22.035435 1566252 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:28:22.476190 1566252 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:28:23.037021 1566252 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:28:23.037035 1566252 kubeadm.go:310] 
	I0916 11:28:23.037093 1566252 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:28:23.037097 1566252 kubeadm.go:310] 
	I0916 11:28:23.037172 1566252 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:28:23.037176 1566252 kubeadm.go:310] 
	I0916 11:28:23.037200 1566252 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:28:23.037257 1566252 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:28:23.037305 1566252 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:28:23.037309 1566252 kubeadm.go:310] 
	I0916 11:28:23.037361 1566252 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:28:23.037365 1566252 kubeadm.go:310] 
	I0916 11:28:23.037418 1566252 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:28:23.037421 1566252 kubeadm.go:310] 
	I0916 11:28:23.037472 1566252 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:28:23.037545 1566252 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:28:23.037610 1566252 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:28:23.037614 1566252 kubeadm.go:310] 
	I0916 11:28:23.037696 1566252 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:28:23.037771 1566252 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:28:23.037775 1566252 kubeadm.go:310] 
	I0916 11:28:23.037862 1566252 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8555 --token tbf7ky.1tt0e70e2s2z6skw \
	I0916 11:28:23.037963 1566252 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:28:23.037989 1566252 kubeadm.go:310] 	--control-plane 
	I0916 11:28:23.037992 1566252 kubeadm.go:310] 
	I0916 11:28:23.038075 1566252 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:28:23.038079 1566252 kubeadm.go:310] 
	I0916 11:28:23.038158 1566252 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8555 --token tbf7ky.1tt0e70e2s2z6skw \
	I0916 11:28:23.038257 1566252 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:28:23.041855 1566252 kubeadm.go:310] W0916 11:28:05.515909    1186 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:28:23.042173 1566252 kubeadm.go:310] W0916 11:28:05.517088    1186 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:28:23.042382 1566252 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:28:23.042486 1566252 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:28:23.042507 1566252 cni.go:84] Creating CNI manager for ""
	I0916 11:28:23.042514 1566252 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:28:23.045432 1566252 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:28:23.047950 1566252 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:28:23.051834 1566252 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:28:23.051845 1566252 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:28:23.072545 1566252 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:28:23.372877 1566252 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:28:23.373007 1566252 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:28:23.373083 1566252 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes cert-options-209735 minikube.k8s.io/updated_at=2024_09_16T11_28_23_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=cert-options-209735 minikube.k8s.io/primary=true
	I0916 11:28:23.544656 1566252 ops.go:34] apiserver oom_adj: -16
	I0916 11:28:23.544697 1566252 kubeadm.go:1113] duration metric: took 171.727983ms to wait for elevateKubeSystemPrivileges
	I0916 11:28:23.544709 1566252 kubeadm.go:394] duration metric: took 18.251983542s to StartCluster
	I0916 11:28:23.544724 1566252 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:23.544788 1566252 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:28:23.545780 1566252 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:28:23.545994 1566252 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.85.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:28:23.546103 1566252 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:28:23.546323 1566252 config.go:182] Loaded profile config "cert-options-209735": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:28:23.546355 1566252 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:28:23.546415 1566252 addons.go:69] Setting storage-provisioner=true in profile "cert-options-209735"
	I0916 11:28:23.546431 1566252 addons.go:234] Setting addon storage-provisioner=true in "cert-options-209735"
	I0916 11:28:23.546452 1566252 host.go:66] Checking if "cert-options-209735" exists ...
	I0916 11:28:23.546973 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:28:23.547412 1566252 addons.go:69] Setting default-storageclass=true in profile "cert-options-209735"
	I0916 11:28:23.547426 1566252 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "cert-options-209735"
	I0916 11:28:23.547705 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:28:23.550098 1566252 out.go:177] * Verifying Kubernetes components...
	I0916 11:28:23.560142 1566252 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:28:23.583448 1566252 addons.go:234] Setting addon default-storageclass=true in "cert-options-209735"
	I0916 11:28:23.583486 1566252 host.go:66] Checking if "cert-options-209735" exists ...
	I0916 11:28:23.583956 1566252 cli_runner.go:164] Run: docker container inspect cert-options-209735 --format={{.State.Status}}
	I0916 11:28:23.599667 1566252 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:28:23.602381 1566252 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:28:23.602392 1566252 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:28:23.602460 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:23.610502 1566252 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:28:23.610517 1566252 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:28:23.610578 1566252 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-209735
	I0916 11:28:23.665088 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:23.665478 1566252 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34873 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/cert-options-209735/id_rsa Username:docker}
	I0916 11:28:23.789295 1566252 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.85.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:28:23.822359 1566252 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:28:23.875628 1566252 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:28:23.908338 1566252 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:28:24.187180 1566252 start.go:971] {"host.minikube.internal": 192.168.85.1} host record injected into CoreDNS's ConfigMap
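
	For context, the sed pipeline a few lines above rewrites the coredns ConfigMap in place: it inserts a hosts block ahead of the forward plugin and a log directive ahead of errors, so that host.minikube.internal resolves to the gateway address 192.168.85.1. Reconstructed from the two -e expressions in that Run line (a sketch of the affected Corefile fragment, not a dump taken from the cluster):

	    # inserted before the "errors" line:
	        log
	    # inserted before the "forward . /etc/resolv.conf" line:
	        hosts {
	           192.168.85.1 host.minikube.internal
	           fallthrough
	        }
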
	I0916 11:28:24.189606 1566252 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:28:24.189655 1566252 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:28:24.485430 1566252 api_server.go:72] duration metric: took 939.403609ms to wait for apiserver process to appear ...
	I0916 11:28:24.485447 1566252 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:28:24.485465 1566252 api_server.go:253] Checking apiserver healthz at https://192.168.85.2:8555/healthz ...
	I0916 11:28:24.488191 1566252 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 11:28:24.490906 1566252 addons.go:510] duration metric: took 944.531982ms for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 11:28:24.503089 1566252 api_server.go:279] https://192.168.85.2:8555/healthz returned 200:
	ok
	I0916 11:28:24.504286 1566252 api_server.go:141] control plane version: v1.31.1
	I0916 11:28:24.504303 1566252 api_server.go:131] duration metric: took 18.850838ms to wait for apiserver health ...
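
	The health probe recorded above is a plain HTTPS GET against the apiserver's /healthz endpoint on the non-default port 8555 used by this profile. The same check can be reproduced from the host (a sketch; -k skips TLS verification for brevity):

	    curl -sk https://192.168.85.2:8555/healthz
	    # expected on a healthy control plane, matching the 200 response above:
	    # ok
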
	I0916 11:28:24.504310 1566252 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:28:24.516643 1566252 system_pods.go:59] 5 kube-system pods found
	I0916 11:28:24.516666 1566252 system_pods.go:61] "etcd-cert-options-209735" [714e91ca-fe83-4749-9074-63de66c9e40b] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:28:24.516722 1566252 system_pods.go:61] "kube-apiserver-cert-options-209735" [c8042642-f825-474c-a614-b87c37da410a] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:28:24.516730 1566252 system_pods.go:61] "kube-controller-manager-cert-options-209735" [89ea01e6-2ec2-487d-8048-5edc0e4d2b15] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:28:24.516736 1566252 system_pods.go:61] "kube-scheduler-cert-options-209735" [a7677833-bae4-47fc-ad4a-ad73a7ddb9fb] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:28:24.516741 1566252 system_pods.go:61] "storage-provisioner" [f5fcb466-f137-42bf-8674-308c10e48bb4] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling.)
	I0916 11:28:24.516746 1566252 system_pods.go:74] duration metric: took 12.431157ms to wait for pod list to return data ...
	I0916 11:28:24.516756 1566252 kubeadm.go:582] duration metric: took 970.741947ms to wait for: map[apiserver:true system_pods:true]
	I0916 11:28:24.516768 1566252 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:28:24.520555 1566252 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:28:24.520576 1566252 node_conditions.go:123] node cpu capacity is 2
	I0916 11:28:24.520586 1566252 node_conditions.go:105] duration metric: took 3.813723ms to run NodePressure ...
	I0916 11:28:24.520596 1566252 start.go:241] waiting for startup goroutines ...
	I0916 11:28:24.691485 1566252 kapi.go:214] "coredns" deployment in "kube-system" namespace and "cert-options-209735" context rescaled to 1 replicas
	I0916 11:28:24.691508 1566252 start.go:246] waiting for cluster config update ...
	I0916 11:28:24.691519 1566252 start.go:255] writing updated cluster config ...
	I0916 11:28:24.691817 1566252 ssh_runner.go:195] Run: rm -f paused
	I0916 11:28:24.699250 1566252 out.go:177] * Done! kubectl is now configured to use "cert-options-209735" cluster and "default" namespace by default
	E0916 11:28:24.702548 1566252 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
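
	Note on the error above: both start logs in this section end with the same non-fatal message, fork/exec /usr/local/bin/kubectl: exec format error. The kernel raises that error when asked to execute a binary whose format it does not support; on this arm64 runner (see Architecture: arm64 in the node description below) it typically means the kubectl at /usr/local/bin/kubectl was built for a different CPU architecture. minikube itself finishes successfully; only the host-side kubectl probe fails. A quick way to confirm the mismatch (a diagnostic sketch, not part of the recorded test run):

	    # Compare the binary's target architecture with the host's.
	    file /usr/local/bin/kubectl   # an "x86-64" ELF here would explain the error
	    uname -m                      # expected on this runner: aarch64
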
	
	
	==> CRI-O <==
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.761238688Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e,RepoTags:[registry.k8s.io/kube-controller-manager:v1.31.1],RepoDigests:[registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1 registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849],Size_:86930758,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=ca3f72fd-c63a-4e33-9115-c1de638eef7c name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.761454002Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853,RepoTags:[registry.k8s.io/kube-apiserver:v1.31.1],RepoDigests:[registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef],Size_:92632544,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=5de44547-2e40-4a4e-97ca-0eb671be887a name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.763671949Z" level=info msg="Ran pod sandbox 587d4752705ab2fadabd6f0f9b780170100c83f4ce997eedb364ec6467193e3c with infra container: kube-system/etcd-cert-options-209735/POD" id=8a7cf639-abd3-403b-9af0-94d96759b58c name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.764019903Z" level=info msg="Creating container: kube-system/kube-controller-manager-cert-options-209735/kube-controller-manager" id=034851b5-8901-43f4-80ce-03a04f24af5d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.764115162Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.764652585Z" level=info msg="Creating container: kube-system/kube-apiserver-cert-options-209735/kube-apiserver" id=800790fa-361b-4eac-a244-d61ac9877a38 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.764815272Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.765908324Z" level=info msg="Checking image status: registry.k8s.io/etcd:3.5.15-0" id=e03f7ca3-c19c-4979-a47b-e7f9a7eb4179 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.766123842Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da,RepoTags:[registry.k8s.io/etcd:3.5.15-0],RepoDigests:[registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da],Size_:139912446,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=e03f7ca3-c19c-4979-a47b-e7f9a7eb4179 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.766708066Z" level=info msg="Checking image status: registry.k8s.io/etcd:3.5.15-0" id=683ce864-e3f3-4d6d-8ace-64487f4890ea name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.766860013Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da,RepoTags:[registry.k8s.io/etcd:3.5.15-0],RepoDigests:[registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da],Size_:139912446,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=683ce864-e3f3-4d6d-8ace-64487f4890ea name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.767814853Z" level=info msg="Creating container: kube-system/etcd-cert-options-209735/etcd" id=269f2142-35ac-4aed-8208-ff8a6f18d848 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.767948814Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.876012873Z" level=info msg="Created container f287a2c88f30ac387f75974835438a40d3af26eb6cafdc3aadba3f7a4a840322: kube-system/kube-scheduler-cert-options-209735/kube-scheduler" id=0ee02af4-79db-44d0-ab86-654b849d9ed8 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.876912477Z" level=info msg="Starting container: f287a2c88f30ac387f75974835438a40d3af26eb6cafdc3aadba3f7a4a840322" id=3eaa1e4f-ea1d-4f4e-8297-0875d12ae719 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.894267155Z" level=info msg="Started container" PID=1328 containerID=f287a2c88f30ac387f75974835438a40d3af26eb6cafdc3aadba3f7a4a840322 description=kube-system/kube-scheduler-cert-options-209735/kube-scheduler id=3eaa1e4f-ea1d-4f4e-8297-0875d12ae719 name=/runtime.v1.RuntimeService/StartContainer sandboxID=fd69fc38e71ad429b6b7e849c02960bd331a8634870e5ba97b6e365c30652233
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.927713929Z" level=info msg="Created container 463e25a2c457c141605abc8b946628f7875fbff5e46ebb32b746b97985a1f261: kube-system/kube-apiserver-cert-options-209735/kube-apiserver" id=800790fa-361b-4eac-a244-d61ac9877a38 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.928506254Z" level=info msg="Starting container: 463e25a2c457c141605abc8b946628f7875fbff5e46ebb32b746b97985a1f261" id=a7063c19-7bc7-4f72-b5dc-c81dac6684dd name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.931595227Z" level=info msg="Created container ac4ee257666b5802935140be76ef5f55e990e59977e6426e90796726142326c5: kube-system/etcd-cert-options-209735/etcd" id=269f2142-35ac-4aed-8208-ff8a6f18d848 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.932110275Z" level=info msg="Starting container: ac4ee257666b5802935140be76ef5f55e990e59977e6426e90796726142326c5" id=79dcea35-1c34-4557-b4dd-4926f1696e88 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.934407612Z" level=info msg="Created container caec969db886386957bbe9e3e1e20a6e2089ab03fd82260a04ec0fe5bc50501d: kube-system/kube-controller-manager-cert-options-209735/kube-controller-manager" id=034851b5-8901-43f4-80ce-03a04f24af5d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.934980062Z" level=info msg="Starting container: caec969db886386957bbe9e3e1e20a6e2089ab03fd82260a04ec0fe5bc50501d" id=63594ae3-d94c-432d-ad70-a17608d8ccef name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.943072028Z" level=info msg="Started container" PID=1350 containerID=463e25a2c457c141605abc8b946628f7875fbff5e46ebb32b746b97985a1f261 description=kube-system/kube-apiserver-cert-options-209735/kube-apiserver id=a7063c19-7bc7-4f72-b5dc-c81dac6684dd name=/runtime.v1.RuntimeService/StartContainer sandboxID=ba079f7871fcb136959d65ef789195b87e6c15a538d5a2da682be2926a4c7d90
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.951801994Z" level=info msg="Started container" PID=1376 containerID=ac4ee257666b5802935140be76ef5f55e990e59977e6426e90796726142326c5 description=kube-system/etcd-cert-options-209735/etcd id=79dcea35-1c34-4557-b4dd-4926f1696e88 name=/runtime.v1.RuntimeService/StartContainer sandboxID=587d4752705ab2fadabd6f0f9b780170100c83f4ce997eedb364ec6467193e3c
	Sep 16 11:28:14 cert-options-209735 crio[945]: time="2024-09-16 11:28:14.965575157Z" level=info msg="Started container" PID=1378 containerID=caec969db886386957bbe9e3e1e20a6e2089ab03fd82260a04ec0fe5bc50501d description=kube-system/kube-controller-manager-cert-options-209735/kube-controller-manager id=63594ae3-d94c-432d-ad70-a17608d8ccef name=/runtime.v1.RuntimeService/StartContainer sandboxID=19cbf574eaa804bdf895e7186d3a88d9de075ad9e6fdfe2cf955d6aa7f3ae05e
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	caec969db8863       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   11 seconds ago      Running             kube-controller-manager   0                   19cbf574eaa80       kube-controller-manager-cert-options-209735
	ac4ee257666b5       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   11 seconds ago      Running             etcd                      0                   587d4752705ab       etcd-cert-options-209735
	463e25a2c457c       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   11 seconds ago      Running             kube-apiserver            0                   ba079f7871fcb       kube-apiserver-cert-options-209735
	f287a2c88f30a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   11 seconds ago      Running             kube-scheduler            0                   fd69fc38e71ad       kube-scheduler-cert-options-209735
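
	The table above is the CRI's view of the control-plane containers, all running on their first attempt roughly 11 seconds after creation. With CRI-O as the runtime (cri-o://1.24.6, per the node description below), an equivalent listing can be produced on the node itself (a sketch; the exact command the report tooling ran is not shown here):

	    # List all containers known to the CRI runtime, including exited ones.
	    sudo crictl ps -a
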
	
	
	==> describe nodes <==
	Name:               cert-options-209735
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=cert-options-209735
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=cert-options-209735
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_28_23_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:28:19 +0000
	Taints:             node.kubernetes.io/not-ready:NoSchedule
	Unschedulable:      false
	Lease:
	  HolderIdentity:  cert-options-209735
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:28:22 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:28:22 +0000   Mon, 16 Sep 2024 11:28:15 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:28:22 +0000   Mon, 16 Sep 2024 11:28:15 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:28:22 +0000   Mon, 16 Sep 2024 11:28:15 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            False   Mon, 16 Sep 2024 11:28:22 +0000   Mon, 16 Sep 2024 11:28:15 +0000   KubeletNotReady              container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: No CNI configuration file in /etc/cni/net.d/. Has your network provider started?
	Addresses:
	  InternalIP:  192.168.85.2
	  Hostname:    cert-options-209735
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 b725f64a430042c6b0685b8e85ed0e21
	  System UUID:                e381d3f8-201a-4cb5-81aa-1149d7eadbe4
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (4 in total)
	  Namespace                   Name                                           CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                           ------------  ----------  ---------------  -------------  ---
	  kube-system                 etcd-cert-options-209735                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4s
	  kube-system                 kube-apiserver-cert-options-209735             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-controller-manager-cert-options-209735    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-scheduler-cert-options-209735             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                650m (32%)  0 (0%)
	  memory             100Mi (1%)  0 (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   NodeHasSufficientMemory  12s (x8 over 12s)  kubelet          Node cert-options-209735 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    12s (x8 over 12s)  kubelet          Node cert-options-209735 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     12s (x7 over 12s)  kubelet          Node cert-options-209735 status is now: NodeHasSufficientPID
	  Normal   Starting                 4s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 4s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4s                 kubelet          Node cert-options-209735 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4s                 kubelet          Node cert-options-209735 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4s                 kubelet          Node cert-options-209735 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           1s                 node-controller  Node cert-options-209735 event: Registered Node cert-options-209735 in Controller
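
	The one failing condition above, Ready=False with reason NetworkPluginNotReady, is expected at this snapshot: no CNI config exists yet under /etc/cni/net.d/, because the kindnet manifest was only applied at 11:28:23, and the node.kubernetes.io/not-ready taint it implies is also what keeps storage-provisioner Pending earlier in the log. The condition clears once the CNI plugin writes its config; one way to watch for that on this profile (a sketch, assuming a working kubectl on the host):

	    # Check whether the CNI plugin has written its config yet.
	    minikube -p cert-options-209735 ssh -- ls /etc/cni/net.d
	    # Then watch the node flip to Ready:
	    kubectl get node cert-options-209735 -w
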
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [ac4ee257666b5802935140be76ef5f55e990e59977e6426e90796726142326c5] <==
	{"level":"info","ts":"2024-09-16T11:28:15.104178Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap.db","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:28:15.109529Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:28:15.104532Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed switched to configuration voters=(11459225503572592365)"}
	{"level":"info","ts":"2024-09-16T11:28:15.110124Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"68eaea490fab4e05","local-member-id":"9f0758e1c58a86ed","added-peer-id":"9f0758e1c58a86ed","added-peer-peer-urls":["https://192.168.85.2:2380"]}
	{"level":"info","ts":"2024-09-16T11:28:15.110211Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/wal","suffix":"wal","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:28:15.461631Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:28:15.461774Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:28:15.461838Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed received MsgPreVoteResp from 9f0758e1c58a86ed at term 1"}
	{"level":"info","ts":"2024-09-16T11:28:15.461881Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:28:15.461916Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed received MsgVoteResp from 9f0758e1c58a86ed at term 2"}
	{"level":"info","ts":"2024-09-16T11:28:15.461956Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:28:15.461993Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 9f0758e1c58a86ed elected leader 9f0758e1c58a86ed at term 2"}
	{"level":"info","ts":"2024-09-16T11:28:15.464842Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:28:15.467803Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"9f0758e1c58a86ed","local-member-attributes":"{Name:cert-options-209735 ClientURLs:[https://192.168.85.2:2379]}","request-path":"/0/members/9f0758e1c58a86ed/attributes","cluster-id":"68eaea490fab4e05","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:28:15.468551Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"68eaea490fab4e05","local-member-id":"9f0758e1c58a86ed","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:28:15.468657Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:28:15.468738Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:28:15.468742Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:28:15.468816Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:28:15.468713Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:28:15.468692Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:28:15.471294Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:28:15.477649Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:28:15.478428Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:28:15.484971Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.85.2:2379"}
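
	The etcd log above shows a normal single-member bootstrap: the member pre-votes and votes for itself, becomes leader at term 2, then serves clients on both 127.0.0.1:2379 and 192.168.85.2:2379. If member health ever needs to be checked interactively, etcdctl can be run inside the etcd pod (a sketch; the certificate paths are an assumption based on minikube's usual /var/lib/minikube/certs layout and may differ):

	    kubectl -n kube-system exec etcd-cert-options-209735 -- sh -c \
	      'ETCDCTL_API=3 etcdctl \
	         --endpoints=https://127.0.0.1:2379 \
	         --cacert=/var/lib/minikube/certs/etcd/ca.crt \
	         --cert=/var/lib/minikube/certs/etcd/server.crt \
	         --key=/var/lib/minikube/certs/etcd/server.key \
	         endpoint status -w table'
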
	
	
	==> kernel <==
	 11:28:26 up 11:10,  0 users,  load average: 5.70, 4.26, 3.27
	Linux cert-options-209735 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kube-apiserver [463e25a2c457c141605abc8b946628f7875fbff5e46ebb32b746b97985a1f261] <==
	I0916 11:28:19.269411       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:28:19.269447       1 policy_source.go:224] refreshing policies
	I0916 11:28:19.272431       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:28:19.286687       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:28:19.286767       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:28:19.286796       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:28:19.286810       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:28:19.286827       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:28:19.286832       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:28:19.309342       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:28:20.067654       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:28:20.074252       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:28:20.074280       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:28:20.756378       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:28:20.815412       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:28:20.969166       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:28:20.976886       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.85.2]
	I0916 11:28:20.978103       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:28:20.983188       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:28:21.213043       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:28:22.457335       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:28:22.473573       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:28:22.638119       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:28:26.565993       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 11:28:26.967380       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	
	
	==> kube-controller-manager [caec969db886386957bbe9e3e1e20a6e2089ab03fd82260a04ec0fe5bc50501d] <==
	I0916 11:28:26.016380       1 shared_informer.go:320] Caches are synced for TTL
	I0916 11:28:26.017995       1 shared_informer.go:320] Caches are synced for node
	I0916 11:28:26.018149       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 11:28:26.018206       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 11:28:26.018239       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 11:28:26.018272       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 11:28:26.018015       1 shared_informer.go:320] Caches are synced for job
	I0916 11:28:26.016413       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 11:28:26.018030       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 11:28:26.018038       1 shared_informer.go:320] Caches are synced for endpoint
	I0916 11:28:26.018045       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 11:28:26.018059       1 shared_informer.go:320] Caches are synced for crt configmap
	I0916 11:28:26.016403       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 11:28:26.024321       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 11:28:26.036376       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="cert-options-209735" podCIDRs=["10.244.0.0/24"]
	I0916 11:28:26.039331       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="cert-options-209735"
	I0916 11:28:26.039445       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="cert-options-209735"
	I0916 11:28:26.059560       1 shared_informer.go:320] Caches are synced for disruption
	I0916 11:28:26.112937       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 11:28:26.172942       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:28:26.189901       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:28:26.657375       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:28:26.658703       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:28:26.658731       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 11:28:26.725614       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="cert-options-209735"
	
	
	==> kube-scheduler [f287a2c88f30ac387f75974835438a40d3af26eb6cafdc3aadba3f7a4a840322] <==
	W0916 11:28:19.786083       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:28:19.786103       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786147       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:28:19.786162       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786147       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	W0916 11:28:19.786224       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:28:19.786240       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	E0916 11:28:19.786229       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786332       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 11:28:19.786348       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786377       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 11:28:19.786417       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786451       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 11:28:19.786508       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786531       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	W0916 11:28:19.786572       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 11:28:19.786592       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	E0916 11:28:19.786621       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786397       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:28:19.786714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786491       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:28:19.786812       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:28:19.786660       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 11:28:19.786903       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:28:21.173632       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
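
	The burst of "forbidden" list/watch errors above is a startup ordering artifact rather than a misconfiguration: the scheduler's informers start before the RBAC bootstrap policy and the extension-apiserver client-ca (whose cache sync is the final line) are in place, and the reflectors simply retry until authorization succeeds. After bootstrap, the permissions can be spot-checked via impersonation (a sketch, assuming the caller has impersonation rights):

	    # Verify the scheduler's RBAC once the cluster has settled.
	    kubectl auth can-i list nodes --as=system:kube-scheduler
	    # expected: yes
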
	
	
	==> kubelet <==
	Sep 16 11:28:22 cert-options-209735 kubelet[1496]: I0916 11:28:22.577300    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/host-path/5b65259f0fb2a4ab54fb29632d5316db-ca-certs\") pod \"kube-controller-manager-cert-options-209735\" (UID: \"5b65259f0fb2a4ab54fb29632d5316db\") " pod="kube-system/kube-controller-manager-cert-options-209735"
	Sep 16 11:28:22 cert-options-209735 kubelet[1496]: I0916 11:28:22.577318    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/5b65259f0fb2a4ab54fb29632d5316db-etc-ca-certificates\") pod \"kube-controller-manager-cert-options-209735\" (UID: \"5b65259f0fb2a4ab54fb29632d5316db\") " pod="kube-system/kube-controller-manager-cert-options-209735"
	Sep 16 11:28:22 cert-options-209735 kubelet[1496]: I0916 11:28:22.577337    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-share-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/5b65259f0fb2a4ab54fb29632d5316db-usr-share-ca-certificates\") pod \"kube-controller-manager-cert-options-209735\" (UID: \"5b65259f0fb2a4ab54fb29632d5316db\") " pod="kube-system/kube-controller-manager-cert-options-209735"
	Sep 16 11:28:22 cert-options-209735 kubelet[1496]: I0916 11:28:22.581608    1496 kubelet_node_status.go:111] "Node was previously registered" node="cert-options-209735"
	Sep 16 11:28:22 cert-options-209735 kubelet[1496]: I0916 11:28:22.581713    1496 kubelet_node_status.go:75] "Successfully registered node" node="cert-options-209735"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: I0916 11:28:23.350203    1496 apiserver.go:52] "Watching apiserver"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: I0916 11:28:23.376231    1496 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: E0916 11:28:23.481379    1496 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"kube-scheduler-cert-options-209735\" already exists" pod="kube-system/kube-scheduler-cert-options-209735"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: E0916 11:28:23.496730    1496 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"etcd-cert-options-209735\" already exists" pod="kube-system/etcd-cert-options-209735"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: E0916 11:28:23.501098    1496 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"kube-apiserver-cert-options-209735\" already exists" pod="kube-system/kube-apiserver-cert-options-209735"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: I0916 11:28:23.538437    1496 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-controller-manager-cert-options-209735" podStartSLOduration=1.538400272 podStartE2EDuration="1.538400272s" podCreationTimestamp="2024-09-16 11:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:28:23.498341659 +0000 UTC m=+1.222665835" watchObservedRunningTime="2024-09-16 11:28:23.538400272 +0000 UTC m=+1.262724431"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: I0916 11:28:23.579639    1496 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/etcd-cert-options-209735" podStartSLOduration=1.579619948 podStartE2EDuration="1.579619948s" podCreationTimestamp="2024-09-16 11:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:28:23.539065076 +0000 UTC m=+1.263389243" watchObservedRunningTime="2024-09-16 11:28:23.579619948 +0000 UTC m=+1.303944107"
	Sep 16 11:28:23 cert-options-209735 kubelet[1496]: I0916 11:28:23.670822    1496 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-cert-options-209735" podStartSLOduration=1.670799742 podStartE2EDuration="1.670799742s" podCreationTimestamp="2024-09-16 11:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:28:23.58228133 +0000 UTC m=+1.306605497" watchObservedRunningTime="2024-09-16 11:28:23.670799742 +0000 UTC m=+1.395123909"
	Sep 16 11:28:25 cert-options-209735 kubelet[1496]: I0916 11:28:25.543450    1496 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-scheduler-cert-options-209735" podStartSLOduration=3.543430662 podStartE2EDuration="3.543430662s" podCreationTimestamp="2024-09-16 11:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:28:23.685004089 +0000 UTC m=+1.409328248" watchObservedRunningTime="2024-09-16 11:28:25.543430662 +0000 UTC m=+3.267754829"
	Sep 16 11:28:26 cert-options-209735 kubelet[1496]: I0916 11:28:26.093675    1496 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:28:26 cert-options-209735 kubelet[1496]: I0916 11:28:26.094674    1496 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.014773    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-proxy\" (UniqueName: \"kubernetes.io/configmap/31c4c638-4d62-4838-af3d-9cd5f5d41819-kube-proxy\") pod \"kube-proxy-w4nlm\" (UID: \"31c4c638-4d62-4838-af3d-9cd5f5d41819\") " pod="kube-system/kube-proxy-w4nlm"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.014823    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/31c4c638-4d62-4838-af3d-9cd5f5d41819-xtables-lock\") pod \"kube-proxy-w4nlm\" (UID: \"31c4c638-4d62-4838-af3d-9cd5f5d41819\") " pod="kube-system/kube-proxy-w4nlm"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.014850    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvsrm\" (UniqueName: \"kubernetes.io/projected/31c4c638-4d62-4838-af3d-9cd5f5d41819-kube-api-access-vvsrm\") pod \"kube-proxy-w4nlm\" (UID: \"31c4c638-4d62-4838-af3d-9cd5f5d41819\") " pod="kube-system/kube-proxy-w4nlm"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.014876    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/31c4c638-4d62-4838-af3d-9cd5f5d41819-lib-modules\") pod \"kube-proxy-w4nlm\" (UID: \"31c4c638-4d62-4838-af3d-9cd5f5d41819\") " pod="kube-system/kube-proxy-w4nlm"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.115857    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b-xtables-lock\") pod \"kindnet-ch6wl\" (UID: \"e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b\") " pod="kube-system/kindnet-ch6wl"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.115906    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkfcs\" (UniqueName: \"kubernetes.io/projected/e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b-kube-api-access-nkfcs\") pod \"kindnet-ch6wl\" (UID: \"e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b\") " pod="kube-system/kindnet-ch6wl"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.115959    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b-cni-cfg\") pod \"kindnet-ch6wl\" (UID: \"e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b\") " pod="kube-system/kindnet-ch6wl"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.115980    1496 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b-lib-modules\") pod \"kindnet-ch6wl\" (UID: \"e9bd9ac8-8f21-4cad-abb8-5bb7016e1d0b\") " pod="kube-system/kindnet-ch6wl"
	Sep 16 11:28:27 cert-options-209735 kubelet[1496]: I0916 11:28:27.141075    1496 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	

-- /stdout --
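The kubelet excerpt above was captured through "minikube logs"; while the profile is still up, the same stream can also be read straight from the node. A minimal sketch, assuming the kicbase image's systemd-managed kubelet (profile name taken from this run):

    # Tail the kubelet journal on the node, mirroring "logs -n 25"
    out/minikube-linux-arm64 -p cert-options-209735 ssh -- sudo journalctl -u kubelet --no-pager -n 25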
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p cert-options-209735 -n cert-options-209735
helpers_test.go:261: (dbg) Run:  kubectl --context cert-options-209735 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context cert-options-209735 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (2.450565ms)
helpers_test.go:263: kubectl --context cert-options-209735 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:175: Cleaning up "cert-options-209735" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-options-209735
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-options-209735: (2.05103415s)
--- FAIL: TestCertOptions (41.78s)

TestFunctional/serial/KubeContext (2.89s)

=== RUN   TestFunctional/serial/KubeContext
functional_test.go:681: (dbg) Run:  kubectl config current-context
functional_test.go:681: (dbg) Non-zero exit: kubectl config current-context: fork/exec /usr/local/bin/kubectl: exec format error (4.156144ms)
functional_test.go:683: failed to get current-context. args "kubectl config current-context" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:687: expected current-context = "functional-919910", but got *""*
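Note the failure mode: "fork/exec /usr/local/bin/kubectl: exec format error" means the kernel refused to load the kubectl binary at all, which is what happens when the binary was built for a different CPU architecture than this arm64 host; no cluster or kubeconfig problem is involved. A diagnostic sketch (file and uname are standard tools; the download URL follows the upstream kubectl install pattern and is shown only as an illustration):

    uname -m                      # aarch64 on this runner
    file /usr/local/bin/kubectl   # an x86-64 ELF here would explain "exec format error"
    # If the architectures disagree, fetch the matching arm64 build:
    curl -LO "https://dl.k8s.io/release/$(curl -Ls https://dl.k8s.io/release/stable.txt)/bin/linux/arm64/kubectl"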
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/KubeContext]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
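The full inspect dump is kept for the post-mortem, but a single field can be extracted with the same Go-template syntax the harness itself uses (see the cli_runner.go lines in the "Last Start" log below). For example, the host port mapped to the container's SSH port:

    # Prints 34613 for this run (see NetworkSettings.Ports above)
    docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' functional-919910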
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/serial/KubeContext FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/KubeContext]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (1.904099349s)
helpers_test.go:252: TestFunctional/serial/KubeContext logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| addons  | disable nvidia-device-plugin   | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | -p addons-936355               |                   |         |         |                     |                     |
	| addons  | enable headlamp                | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | -p addons-936355               |                   |         |         |                     |                     |
	|         | --alsologtostderr -v=1         |                   |         |         |                     |                     |
	| addons  | disable cloud-spanner -p       | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | addons-936355 addons disable   | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | headlamp --alsologtostderr     |                   |         |         |                     |                     |
	|         | -v=1                           |                   |         |         |                     |                     |
	| stop    | -p addons-936355               | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| addons  | enable dashboard -p            | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | disable dashboard -p           | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | disable gvisor -p              | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| delete  | -p addons-936355               | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| start   | -p nospam-329014 -n=1          | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:46 UTC |
	|         | --memory=2250 --wait=false     |                   |         |         |                     |                     |
	|         | --log_dir=/tmp/nospam-329014   |                   |         |         |                     |                     |
	|         | --driver=docker                |                   |         |         |                     |                     |
	|         | --container-runtime=crio       |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| delete  | -p nospam-329014               | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	| start   | -p functional-919910           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:47 UTC |
	|         | --memory=4000                  |                   |         |         |                     |                     |
	|         | --apiserver-port=8441          |                   |         |         |                     |                     |
	|         | --wait=all --driver=docker     |                   |         |         |                     |                     |
	|         | --container-runtime=crio       |                   |         |         |                     |                     |
	| start   | -p functional-919910           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|         | --alsologtostderr -v=8         |                   |         |         |                     |                     |
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:47:50
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:47:50.205624 1401996 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:47:50.205806 1401996 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:50.205843 1401996 out.go:358] Setting ErrFile to fd 2...
	I0916 10:47:50.205856 1401996 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:50.206158 1401996 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:47:50.206622 1401996 out.go:352] Setting JSON to false
	I0916 10:47:50.207693 1401996 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37816,"bootTime":1726445855,"procs":176,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:47:50.207772 1401996 start.go:139] virtualization:  
	I0916 10:47:50.211311 1401996 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:47:50.214854 1401996 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:47:50.214961 1401996 notify.go:220] Checking for updates...
	I0916 10:47:50.221512 1401996 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:47:50.225211 1401996 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:50.228542 1401996 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:47:50.231201 1401996 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:47:50.233889 1401996 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:47:50.237099 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:50.237205 1401996 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:47:50.273934 1401996 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:47:50.274107 1401996 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:50.332273 1401996 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:47:50.322237464 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:50.332388 1401996 docker.go:318] overlay module found
	I0916 10:47:50.335094 1401996 out.go:177] * Using the docker driver based on existing profile
	I0916 10:47:50.337742 1401996 start.go:297] selected driver: docker
	I0916 10:47:50.337768 1401996 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:50.337887 1401996 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:47:50.338002 1401996 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:50.398334 1401996 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:47:50.388377677 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:50.398780 1401996 cni.go:84] Creating CNI manager for ""
	I0916 10:47:50.398847 1401996 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:47:50.398900 1401996 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:50.401766 1401996 out.go:177] * Starting "functional-919910" primary control-plane node in "functional-919910" cluster
	I0916 10:47:50.404667 1401996 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:47:50.407246 1401996 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:47:50.409875 1401996 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:47:50.409936 1401996 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:47:50.409948 1401996 cache.go:56] Caching tarball of preloaded images
	I0916 10:47:50.409958 1401996 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:47:50.410031 1401996 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:47:50.410041 1401996 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:47:50.410157 1401996 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/config.json ...
	W0916 10:47:50.438976 1401996 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:47:50.438997 1401996 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:47:50.439079 1401996 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:47:50.439104 1401996 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:47:50.439111 1401996 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:47:50.439119 1401996 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:47:50.439137 1401996 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:47:50.440551 1401996 image.go:273] response: 
	I0916 10:47:50.570726 1401996 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:47:50.570776 1401996 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:47:50.570806 1401996 start.go:360] acquireMachinesLock for functional-919910: {Name:mkddf275897a7528274aa0390d95d40845ffb1ab Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:47:50.570911 1401996 start.go:364] duration metric: took 57.352µs to acquireMachinesLock for "functional-919910"
	I0916 10:47:50.570939 1401996 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:47:50.570948 1401996 fix.go:54] fixHost starting: 
	I0916 10:47:50.571270 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:47:50.588516 1401996 fix.go:112] recreateIfNeeded on functional-919910: state=Running err=<nil>
	W0916 10:47:50.588549 1401996 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:47:50.591367 1401996 out.go:177] * Updating the running docker "functional-919910" container ...
	I0916 10:47:50.593873 1401996 machine.go:93] provisionDockerMachine start ...
	I0916 10:47:50.594037 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.612718 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.613019 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.613034 1401996 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:47:50.756346 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:47:50.756391 1401996 ubuntu.go:169] provisioning hostname "functional-919910"
	I0916 10:47:50.756460 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.775145 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.775403 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.775430 1401996 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-919910 && echo "functional-919910" | sudo tee /etc/hostname
	I0916 10:47:50.926354 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:47:50.926444 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.945122 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.945378 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.945401 1401996 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-919910' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-919910/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-919910' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:47:51.093446 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:47:51.093489 1401996 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:47:51.093510 1401996 ubuntu.go:177] setting up certificates
	I0916 10:47:51.093521 1401996 provision.go:84] configureAuth start
	I0916 10:47:51.093593 1401996 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:47:51.112587 1401996 provision.go:143] copyHostCerts
	I0916 10:47:51.112638 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:47:51.112698 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:47:51.112711 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:47:51.112791 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:47:51.112900 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:47:51.112924 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:47:51.112931 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:47:51.112961 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:47:51.113021 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:47:51.113043 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:47:51.113051 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:47:51.113092 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:47:51.113161 1401996 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.functional-919910 san=[127.0.0.1 192.168.49.2 functional-919910 localhost minikube]
	I0916 10:47:51.593684 1401996 provision.go:177] copyRemoteCerts
	I0916 10:47:51.593768 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:47:51.593810 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:51.612407 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:51.710588 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:47:51.710650 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:47:51.738523 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:47:51.738608 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:47:51.763604 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:47:51.763668 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:47:51.791416 1401996 provision.go:87] duration metric: took 697.879051ms to configureAuth
	I0916 10:47:51.791445 1401996 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:47:51.791646 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:51.791766 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:51.809379 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:51.809661 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:51.809685 1401996 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:47:57.199046 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:47:57.199069 1401996 machine.go:96] duration metric: took 6.605174237s to provisionDockerMachine
	I0916 10:47:57.199080 1401996 start.go:293] postStartSetup for "functional-919910" (driver="docker")
	I0916 10:47:57.199092 1401996 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:47:57.199163 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:47:57.199205 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.216257 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.317946 1401996 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:47:57.321343 1401996 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 10:47:57.321365 1401996 command_runner.go:130] > NAME="Ubuntu"
	I0916 10:47:57.321371 1401996 command_runner.go:130] > VERSION_ID="22.04"
	I0916 10:47:57.321377 1401996 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 10:47:57.321382 1401996 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 10:47:57.321386 1401996 command_runner.go:130] > ID=ubuntu
	I0916 10:47:57.321390 1401996 command_runner.go:130] > ID_LIKE=debian
	I0916 10:47:57.321394 1401996 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 10:47:57.321400 1401996 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 10:47:57.321406 1401996 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 10:47:57.321413 1401996 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 10:47:57.321417 1401996 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 10:47:57.321481 1401996 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:47:57.321509 1401996 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:47:57.321522 1401996 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:47:57.321532 1401996 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:47:57.321543 1401996 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:47:57.321605 1401996 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:47:57.321689 1401996 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:47:57.321700 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:47:57.321774 1401996 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> hosts in /etc/test/nested/copy/1383833
	I0916 10:47:57.321782 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> /etc/test/nested/copy/1383833/hosts
	I0916 10:47:57.321836 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/1383833
	I0916 10:47:57.330778 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:47:57.356667 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts --> /etc/test/nested/copy/1383833/hosts (40 bytes)
	I0916 10:47:57.381584 1401996 start.go:296] duration metric: took 182.487479ms for postStartSetup
	I0916 10:47:57.381669 1401996 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:47:57.381735 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.399941 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.493892 1401996 command_runner.go:130] > 12%
	I0916 10:47:57.493969 1401996 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:47:57.498209 1401996 command_runner.go:130] > 172G
	I0916 10:47:57.498639 1401996 fix.go:56] duration metric: took 6.927687118s for fixHost
	I0916 10:47:57.498657 1401996 start.go:83] releasing machines lock for "functional-919910", held for 6.927732663s
	I0916 10:47:57.498733 1401996 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:47:57.515304 1401996 ssh_runner.go:195] Run: cat /version.json
	I0916 10:47:57.515346 1401996 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:47:57.515393 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.515401 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.534618 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.535082 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.628245 1401996 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 10:47:57.628425 1401996 ssh_runner.go:195] Run: systemctl --version
	I0916 10:47:57.749769 1401996 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 10:47:57.752956 1401996 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 10:47:57.752994 1401996 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 10:47:57.753060 1401996 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:47:57.895632 1401996 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:47:57.899891 1401996 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 10:47:57.899921 1401996 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 10:47:57.899928 1401996 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 10:47:57.899936 1401996 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:57.899942 1401996 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 10:47:57.899947 1401996 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 10:47:57.899952 1401996 command_runner.go:130] > Change: 2024-09-16 10:46:42.438096271 +0000
	I0916 10:47:57.899957 1401996 command_runner.go:130] >  Birth: 2024-09-16 10:46:42.434096374 +0000
	I0916 10:47:57.900143 1401996 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:47:57.909149 1401996 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:47:57.909234 1401996 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:47:57.918432 1401996 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:47:57.918474 1401996 start.go:495] detecting cgroup driver to use...
	I0916 10:47:57.918507 1401996 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:47:57.918558 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:47:57.932361 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:47:57.944964 1401996 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:47:57.945069 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:47:57.959974 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:47:57.972585 1401996 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:47:58.098313 1401996 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:47:58.224367 1401996 docker.go:233] disabling docker service ...
	I0916 10:47:58.224443 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:47:58.238821 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:47:58.251747 1401996 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:47:58.377715 1401996 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:47:58.505320 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:47:58.516841 1401996 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:47:58.533953 1401996 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
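
The tee above pins the CRI endpoint to CRI-O on the node. Reconstructed from the command and its echoed output, the resulting file is:

    # /etc/crictl.yaml (as written by the tee command above)
    runtime-endpoint: unix:///var/run/crio/crio.sock
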
	I0916 10:47:58.535668 1401996 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:47:58.535761 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.545995 1401996 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:47:58.546067 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.556144 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.567289 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.578091 1401996 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:47:58.587840 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.597939 1401996 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.607513 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
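
Net effect of the sed edits above on /etc/crio/crio.conf.d/02-crio.conf, reconstructed from the commands here and consistent with the `crio config` dump later in this log (a sketch of the touched keys, not a verbatim listing of the whole file):

    pause_image = "registry.k8s.io/pause:3.10"
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
        "net.ipv4.ip_unprivileged_port_start=0",
    ]
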
	I0916 10:47:58.617313 1401996 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:47:58.625015 1401996 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 10:47:58.626470 1401996 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:47:58.635977 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:47:58.755097 1401996 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:47:58.923185 1401996 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:47:58.923281 1401996 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:47:58.927043 1401996 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 10:47:58.927069 1401996 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 10:47:58.927076 1401996 command_runner.go:130] > Device: 43h/67d	Inode: 572         Links: 1
	I0916 10:47:58.927084 1401996 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:58.927089 1401996 command_runner.go:130] > Access: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927114 1401996 command_runner.go:130] > Modify: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927125 1401996 command_runner.go:130] > Change: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927156 1401996 command_runner.go:130] >  Birth: -
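
What "Will wait 60s for socket path /var/run/crio/crio.sock" presumably amounts to: poll until the path exists and is a unix socket, or give up at the deadline. A minimal illustrative sketch in Go, not minikube's actual start.go:

    package main

    import (
    	"fmt"
    	"os"
    	"time"
    )

    // waitForSocket polls path until it is a unix socket or timeout elapses.
    func waitForSocket(path string, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for time.Now().Before(deadline) {
    		if fi, err := os.Stat(path); err == nil && fi.Mode()&os.ModeSocket != 0 {
    			return nil // socket is present, the runtime is accepting connections
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	return fmt.Errorf("timed out after %s waiting for %s", timeout, path)
    }

    func main() {
    	if err := waitForSocket("/var/run/crio/crio.sock", 60*time.Second); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    	fmt.Println("crio socket is ready")
    }
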
	I0916 10:47:58.927204 1401996 start.go:563] Will wait 60s for crictl version
	I0916 10:47:58.927275 1401996 ssh_runner.go:195] Run: which crictl
	I0916 10:47:58.930492 1401996 command_runner.go:130] > /usr/bin/crictl
	I0916 10:47:58.930791 1401996 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:47:58.968537 1401996 command_runner.go:130] > Version:  0.1.0
	I0916 10:47:58.968561 1401996 command_runner.go:130] > RuntimeName:  cri-o
	I0916 10:47:58.968567 1401996 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 10:47:58.968573 1401996 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 10:47:58.971326 1401996 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:47:58.971438 1401996 ssh_runner.go:195] Run: crio --version
	I0916 10:47:59.011967 1401996 command_runner.go:130] > crio version 1.24.6
	I0916 10:47:59.012041 1401996 command_runner.go:130] > Version:          1.24.6
	I0916 10:47:59.012074 1401996 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 10:47:59.012093 1401996 command_runner.go:130] > GitTreeState:     clean
	I0916 10:47:59.012114 1401996 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 10:47:59.012149 1401996 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 10:47:59.012167 1401996 command_runner.go:130] > Compiler:         gc
	I0916 10:47:59.012185 1401996 command_runner.go:130] > Platform:         linux/arm64
	I0916 10:47:59.012207 1401996 command_runner.go:130] > Linkmode:         dynamic
	I0916 10:47:59.012241 1401996 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 10:47:59.012260 1401996 command_runner.go:130] > SeccompEnabled:   true
	I0916 10:47:59.012280 1401996 command_runner.go:130] > AppArmorEnabled:  false
	I0916 10:47:59.013900 1401996 ssh_runner.go:195] Run: crio --version
	I0916 10:47:59.050810 1401996 command_runner.go:130] > crio version 1.24.6
	I0916 10:47:59.050856 1401996 command_runner.go:130] > Version:          1.24.6
	I0916 10:47:59.050865 1401996 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 10:47:59.050870 1401996 command_runner.go:130] > GitTreeState:     clean
	I0916 10:47:59.050878 1401996 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 10:47:59.050882 1401996 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 10:47:59.050886 1401996 command_runner.go:130] > Compiler:         gc
	I0916 10:47:59.050890 1401996 command_runner.go:130] > Platform:         linux/arm64
	I0916 10:47:59.050908 1401996 command_runner.go:130] > Linkmode:         dynamic
	I0916 10:47:59.050919 1401996 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 10:47:59.050926 1401996 command_runner.go:130] > SeccompEnabled:   true
	I0916 10:47:59.050930 1401996 command_runner.go:130] > AppArmorEnabled:  false
	I0916 10:47:59.058518 1401996 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:47:59.061208 1401996 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:47:59.076753 1401996 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:47:59.080562 1401996 command_runner.go:130] > 192.168.49.1	host.minikube.internal
	I0916 10:47:59.080708 1401996 kubeadm.go:883] updating cluster {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:47:59.080823 1401996 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:47:59.080882 1401996 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:47:59.129335 1401996 command_runner.go:130] > {
	I0916 10:47:59.129361 1401996 command_runner.go:130] >   "images": [
	I0916 10:47:59.129366 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129376 1401996 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:47:59.129381 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129399 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:47:59.129405 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129410 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129425 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 10:47:59.129436 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:47:59.129442 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129447 1401996 command_runner.go:130] >       "size": "90295858",
	I0916 10:47:59.129454 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129458 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129468 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129476 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129479 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129482 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129488 1401996 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:47:59.129497 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129502 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:47:59.129509 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129513 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129525 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 10:47:59.129536 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:47:59.129542 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129555 1401996 command_runner.go:130] >       "size": "29037500",
	I0916 10:47:59.129569 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129573 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129579 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129583 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129591 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129594 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129601 1401996 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:47:59.129607 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129613 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:47:59.129619 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129623 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129635 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 10:47:59.129646 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:47:59.129649 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129653 1401996 command_runner.go:130] >       "size": "61647114",
	I0916 10:47:59.129659 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129663 1401996 command_runner.go:130] >       "username": "nonroot",
	I0916 10:47:59.129669 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129673 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129679 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129682 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129689 1401996 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:47:59.129695 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129702 1401996 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:47:59.129708 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129712 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129723 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 10:47:59.129739 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 10:47:59.129746 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129750 1401996 command_runner.go:130] >       "size": "139912446",
	I0916 10:47:59.129754 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129760 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129764 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129775 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129782 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129787 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129793 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129796 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129803 1401996 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:47:59.129809 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129815 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:47:59.129821 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129825 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129838 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 10:47:59.129846 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 10:47:59.129853 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129857 1401996 command_runner.go:130] >       "size": "92632544",
	I0916 10:47:59.129864 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129868 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129875 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129879 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129886 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129890 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129896 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129900 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129906 1401996 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:47:59.129912 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129918 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:47:59.129922 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129928 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129937 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 10:47:59.129949 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 10:47:59.129955 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129959 1401996 command_runner.go:130] >       "size": "86930758",
	I0916 10:47:59.129966 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129970 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129981 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129988 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129992 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129999 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130002 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130010 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130017 1401996 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:47:59.130021 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130026 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:47:59.130033 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130037 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130049 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 10:47:59.130060 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 10:47:59.130066 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130070 1401996 command_runner.go:130] >       "size": "95951255",
	I0916 10:47:59.130077 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.130081 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130088 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130092 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130098 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130101 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130107 1401996 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:47:59.130114 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130120 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:47:59.130125 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130128 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130152 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 10:47:59.130164 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:47:59.130170 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130174 1401996 command_runner.go:130] >       "size": "67007814",
	I0916 10:47:59.130181 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.130185 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.130191 1401996 command_runner.go:130] >       },
	I0916 10:47:59.130199 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130207 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130211 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130217 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130221 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130231 1401996 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:47:59.130238 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130243 1401996 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:47:59.130249 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130253 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130264 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 10:47:59.130275 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:47:59.130281 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130285 1401996 command_runner.go:130] >       "size": "519877",
	I0916 10:47:59.130288 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.130292 1401996 command_runner.go:130] >         "value": "65535"
	I0916 10:47:59.130298 1401996 command_runner.go:130] >       },
	I0916 10:47:59.130302 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130308 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130312 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130319 1401996 command_runner.go:130] >     }
	I0916 10:47:59.130322 1401996 command_runner.go:130] >   ]
	I0916 10:47:59.130325 1401996 command_runner.go:130] > }
	I0916 10:47:59.132975 1401996 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:47:59.133000 1401996 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:47:59.133067 1401996 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:47:59.170227 1401996 command_runner.go:130] > {
	I0916 10:47:59.170298 1401996 command_runner.go:130] >   "images": [
	I0916 10:47:59.170315 1401996 command_runner.go:130] >     {
	I0916 10:47:59.170338 1401996 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:47:59.170371 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.170400 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:47:59.170417 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.170450 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.170477 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 10:47:59.170497 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:47:59.170536 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.170559 1401996 command_runner.go:130] >       "size": "90295858",
	I0916 10:47:59.170580 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.170610 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.170634 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.170653 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.170672 1401996 command_runner.go:130] >     },
	I0916 10:47:59.170698 1401996 command_runner.go:130] >     {
	I0916 10:47:59.170722 1401996 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:47:59.170739 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.170760 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:47:59.170787 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.170809 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.170831 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 10:47:59.170866 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:47:59.170885 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.170906 1401996 command_runner.go:130] >       "size": "29037500",
	I0916 10:47:59.170923 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.170954 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.170975 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.170992 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.171009 1401996 command_runner.go:130] >     },
	I0916 10:47:59.171040 1401996 command_runner.go:130] >     {
	I0916 10:47:59.171064 1401996 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:47:59.171083 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.171102 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:47:59.171137 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171158 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.171182 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 10:47:59.171216 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:47:59.171238 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171257 1401996 command_runner.go:130] >       "size": "61647114",
	I0916 10:47:59.171275 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.171319 1401996 command_runner.go:130] >       "username": "nonroot",
	I0916 10:47:59.171338 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.171368 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.171388 1401996 command_runner.go:130] >     },
	I0916 10:47:59.171404 1401996 command_runner.go:130] >     {
	I0916 10:47:59.171425 1401996 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:47:59.171454 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.171477 1401996 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:47:59.171495 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171512 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.171546 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 10:47:59.171575 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 10:47:59.171592 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171625 1401996 command_runner.go:130] >       "size": "139912446",
	I0916 10:47:59.171647 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.171664 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.171681 1401996 command_runner.go:130] >       },
	I0916 10:47:59.171709 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.171729 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.171747 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.171764 1401996 command_runner.go:130] >     },
	I0916 10:47:59.171794 1401996 command_runner.go:130] >     {
	I0916 10:47:59.171820 1401996 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:47:59.171838 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.171857 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:47:59.171884 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171905 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.171926 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 10:47:59.171948 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 10:47:59.171980 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.171998 1401996 command_runner.go:130] >       "size": "92632544",
	I0916 10:47:59.172015 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.172044 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.172075 1401996 command_runner.go:130] >       },
	I0916 10:47:59.172093 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.172141 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.172163 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.172179 1401996 command_runner.go:130] >     },
	I0916 10:47:59.172196 1401996 command_runner.go:130] >     {
	I0916 10:47:59.172231 1401996 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:47:59.172252 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.172272 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:47:59.172289 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172318 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.172342 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 10:47:59.172364 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 10:47:59.172396 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172418 1401996 command_runner.go:130] >       "size": "86930758",
	I0916 10:47:59.172435 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.172476 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.172496 1401996 command_runner.go:130] >       },
	I0916 10:47:59.172521 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.172551 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.172572 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.172588 1401996 command_runner.go:130] >     },
	I0916 10:47:59.172606 1401996 command_runner.go:130] >     {
	I0916 10:47:59.172638 1401996 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:47:59.172661 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.172703 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:47:59.172715 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172720 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.172728 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 10:47:59.172736 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 10:47:59.172739 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172743 1401996 command_runner.go:130] >       "size": "95951255",
	I0916 10:47:59.172759 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.172790 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.172797 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.172801 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.172804 1401996 command_runner.go:130] >     },
	I0916 10:47:59.172808 1401996 command_runner.go:130] >     {
	I0916 10:47:59.172815 1401996 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:47:59.172821 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.172848 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:47:59.172859 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172863 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.172885 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 10:47:59.172897 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:47:59.172901 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.172920 1401996 command_runner.go:130] >       "size": "67007814",
	I0916 10:47:59.172931 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.172934 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.172938 1401996 command_runner.go:130] >       },
	I0916 10:47:59.172942 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.172947 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.172951 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.172957 1401996 command_runner.go:130] >     },
	I0916 10:47:59.172959 1401996 command_runner.go:130] >     {
	I0916 10:47:59.172966 1401996 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:47:59.172971 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.172976 1401996 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:47:59.172993 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.173003 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.173011 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 10:47:59.173026 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:47:59.173030 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.173034 1401996 command_runner.go:130] >       "size": "519877",
	I0916 10:47:59.173040 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.173044 1401996 command_runner.go:130] >         "value": "65535"
	I0916 10:47:59.173054 1401996 command_runner.go:130] >       },
	I0916 10:47:59.173068 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.173075 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.173079 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.173082 1401996 command_runner.go:130] >     }
	I0916 10:47:59.173088 1401996 command_runner.go:130] >   ]
	I0916 10:47:59.173091 1401996 command_runner.go:130] > }
	I0916 10:47:59.175764 1401996 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:47:59.175833 1401996 cache_images.go:84] Images are preloaded, skipping loading
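
A minimal sketch of consuming the `sudo crictl images --output json` payload dumped twice above; the struct fields mirror the JSON keys in the log ("id", "repoTags", "repoDigests", "size"). The types and the comparison loop are illustrative, not minikube's own cache_images.go:

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"os/exec"
    )

    type criImage struct {
    	ID          string   `json:"id"`
    	RepoTags    []string `json:"repoTags"`
    	RepoDigests []string `json:"repoDigests"`
    	Size        string   `json:"size"` // bytes, serialized as a string by crictl
    }

    type criImageList struct {
    	Images []criImage `json:"images"`
    }

    func main() {
    	out, err := exec.Command("sudo", "crictl", "images", "--output", "json").Output()
    	if err != nil {
    		panic(err)
    	}
    	var list criImageList
    	if err := json.Unmarshal(out, &list); err != nil {
    		panic(err)
    	}
    	// A preload check can then compare these tags against the expected set.
    	for _, img := range list.Images {
    		fmt.Printf("%v (%s bytes)\n", img.RepoTags, img.Size)
    	}
    }
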
	I0916 10:47:59.175849 1401996 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 crio true true} ...
	I0916 10:47:59.175967 1401996 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=functional-919910 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
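
The kubelet drop-in text above is a rendered template filled from the node config that follows it (kubeadm.go:946 logs the result). A hypothetical reduction of that rendering step using text/template; the template string and field names here are illustrative, not minikube's actual template:

    package main

    import (
    	"os"
    	"text/template"
    )

    // unitTmpl is a cut-down stand-in for the logged kubelet unit template.
    const unitTmpl = "[Unit]\nWants=crio.service\n\n[Service]\nExecStart=\n" +
    	"ExecStart=/var/lib/minikube/binaries/{{.KubernetesVersion}}/kubelet " +
    	"--hostname-override={{.NodeName}} --node-ip={{.NodeIP}}\n\n[Install]\n"

    func main() {
    	t := template.Must(template.New("kubelet").Parse(unitTmpl))
    	// Values taken from the cluster config logged above.
    	_ = t.Execute(os.Stdout, map[string]string{
    		"KubernetesVersion": "v1.31.1",
    		"NodeName":          "functional-919910",
    		"NodeIP":            "192.168.49.2",
    	})
    }
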
	I0916 10:47:59.176058 1401996 ssh_runner.go:195] Run: crio config
	I0916 10:47:59.225080 1401996 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 10:47:59.225107 1401996 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 10:47:59.225116 1401996 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 10:47:59.225119 1401996 command_runner.go:130] > #
	I0916 10:47:59.225128 1401996 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 10:47:59.225135 1401996 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 10:47:59.225141 1401996 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 10:47:59.225148 1401996 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 10:47:59.225152 1401996 command_runner.go:130] > # reload'.
	I0916 10:47:59.225159 1401996 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 10:47:59.225166 1401996 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 10:47:59.225175 1401996 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 10:47:59.225181 1401996 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 10:47:59.225184 1401996 command_runner.go:130] > [crio]
	I0916 10:47:59.225190 1401996 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 10:47:59.225195 1401996 command_runner.go:130] > # containers images, in this directory.
	I0916 10:47:59.225857 1401996 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 10:47:59.225883 1401996 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 10:47:59.226449 1401996 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 10:47:59.226467 1401996 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 10:47:59.226480 1401996 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 10:47:59.227033 1401996 command_runner.go:130] > # storage_driver = "vfs"
	I0916 10:47:59.227049 1401996 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 10:47:59.227061 1401996 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 10:47:59.227342 1401996 command_runner.go:130] > # storage_option = [
	I0916 10:47:59.227631 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.227641 1401996 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 10:47:59.227648 1401996 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 10:47:59.228197 1401996 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 10:47:59.228220 1401996 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 10:47:59.228240 1401996 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 10:47:59.228249 1401996 command_runner.go:130] > # always happen on a node reboot
	I0916 10:47:59.228820 1401996 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 10:47:59.228838 1401996 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 10:47:59.228851 1401996 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 10:47:59.228874 1401996 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 10:47:59.229533 1401996 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 10:47:59.229573 1401996 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 10:47:59.229582 1401996 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 10:47:59.230129 1401996 command_runner.go:130] > # internal_wipe = true
	I0916 10:47:59.230145 1401996 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 10:47:59.230159 1401996 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 10:47:59.230168 1401996 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 10:47:59.230748 1401996 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 10:47:59.230766 1401996 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 10:47:59.230776 1401996 command_runner.go:130] > [crio.api]
	I0916 10:47:59.230784 1401996 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 10:47:59.231355 1401996 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 10:47:59.231372 1401996 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 10:47:59.231950 1401996 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 10:47:59.231967 1401996 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 10:47:59.231979 1401996 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 10:47:59.232535 1401996 command_runner.go:130] > # stream_port = "0"
	I0916 10:47:59.232551 1401996 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 10:47:59.233183 1401996 command_runner.go:130] > # stream_enable_tls = false
	I0916 10:47:59.233199 1401996 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 10:47:59.233627 1401996 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 10:47:59.233651 1401996 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 10:47:59.233660 1401996 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 10:47:59.233668 1401996 command_runner.go:130] > # minutes.
	I0916 10:47:59.234100 1401996 command_runner.go:130] > # stream_tls_cert = ""
	I0916 10:47:59.234125 1401996 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 10:47:59.234136 1401996 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 10:47:59.234567 1401996 command_runner.go:130] > # stream_tls_key = ""
	I0916 10:47:59.234591 1401996 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 10:47:59.234598 1401996 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 10:47:59.234604 1401996 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 10:47:59.235021 1401996 command_runner.go:130] > # stream_tls_ca = ""
	I0916 10:47:59.235039 1401996 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 10:47:59.235607 1401996 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 10:47:59.235633 1401996 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 10:47:59.236265 1401996 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
	I0916 10:47:59.236299 1401996 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 10:47:59.236309 1401996 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 10:47:59.236313 1401996 command_runner.go:130] > [crio.runtime]
	I0916 10:47:59.236319 1401996 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 10:47:59.236328 1401996 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 10:47:59.236333 1401996 command_runner.go:130] > # "nofile=1024:2048"
	I0916 10:47:59.236341 1401996 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 10:47:59.236652 1401996 command_runner.go:130] > # default_ulimits = [
	I0916 10:47:59.236991 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.237014 1401996 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 10:47:59.237613 1401996 command_runner.go:130] > # no_pivot = false
	I0916 10:47:59.237635 1401996 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 10:47:59.237644 1401996 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 10:47:59.238256 1401996 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 10:47:59.238277 1401996 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 10:47:59.238284 1401996 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 10:47:59.238294 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 10:47:59.238790 1401996 command_runner.go:130] > # conmon = ""
	I0916 10:47:59.238805 1401996 command_runner.go:130] > # Cgroup setting for conmon
	I0916 10:47:59.238814 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 10:47:59.239157 1401996 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 10:47:59.239175 1401996 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 10:47:59.239181 1401996 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 10:47:59.239190 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 10:47:59.239495 1401996 command_runner.go:130] > # conmon_env = [
	I0916 10:47:59.239816 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.239837 1401996 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 10:47:59.239843 1401996 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 10:47:59.239849 1401996 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 10:47:59.240159 1401996 command_runner.go:130] > # default_env = [
	I0916 10:47:59.240541 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.240557 1401996 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 10:47:59.241047 1401996 command_runner.go:130] > # selinux = false
	I0916 10:47:59.241075 1401996 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 10:47:59.241083 1401996 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 10:47:59.241089 1401996 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 10:47:59.241093 1401996 command_runner.go:130] > # seccomp_profile = ""
	I0916 10:47:59.241105 1401996 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 10:47:59.241111 1401996 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 10:47:59.241117 1401996 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 10:47:59.241122 1401996 command_runner.go:130] > # which might increase security.
	I0916 10:47:59.241126 1401996 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 10:47:59.241133 1401996 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 10:47:59.241143 1401996 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 10:47:59.241149 1401996 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 10:47:59.241160 1401996 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 10:47:59.241166 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.241184 1401996 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 10:47:59.241191 1401996 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 10:47:59.241196 1401996 command_runner.go:130] > # the cgroup blockio controller.
	I0916 10:47:59.241200 1401996 command_runner.go:130] > # blockio_config_file = ""
	I0916 10:47:59.241207 1401996 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 10:47:59.241214 1401996 command_runner.go:130] > # irqbalance daemon.
	I0916 10:47:59.241219 1401996 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 10:47:59.241226 1401996 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 10:47:59.241235 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.241360 1401996 command_runner.go:130] > # rdt_config_file = ""
	I0916 10:47:59.241381 1401996 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 10:47:59.241391 1401996 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 10:47:59.241401 1401996 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 10:47:59.241507 1401996 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 10:47:59.241522 1401996 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 10:47:59.241530 1401996 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 10:47:59.241539 1401996 command_runner.go:130] > # will be added.
	I0916 10:47:59.241547 1401996 command_runner.go:130] > # default_capabilities = [
	I0916 10:47:59.241551 1401996 command_runner.go:130] > # 	"CHOWN",
	I0916 10:47:59.241555 1401996 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 10:47:59.241693 1401996 command_runner.go:130] > # 	"FSETID",
	I0916 10:47:59.241710 1401996 command_runner.go:130] > # 	"FOWNER",
	I0916 10:47:59.241715 1401996 command_runner.go:130] > # 	"SETGID",
	I0916 10:47:59.241718 1401996 command_runner.go:130] > # 	"SETUID",
	I0916 10:47:59.241721 1401996 command_runner.go:130] > # 	"SETPCAP",
	I0916 10:47:59.241725 1401996 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 10:47:59.241729 1401996 command_runner.go:130] > # 	"KILL",
	I0916 10:47:59.241734 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241743 1401996 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 10:47:59.241755 1401996 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 10:47:59.241760 1401996 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 10:47:59.241768 1401996 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 10:47:59.241777 1401996 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 10:47:59.241781 1401996 command_runner.go:130] > default_sysctls = [
	I0916 10:47:59.241889 1401996 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 10:47:59.241899 1401996 command_runner.go:130] > ]
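
	The one non-default sysctl injected above, net.ipv4.ip_unprivileged_port_start=0, lets unprivileged container processes bind ports below 1024. A minimal sketch of probing for that effect from inside a pod (a hypothetical check, not part of this test suite; Python assumed available in the image):

	    import socket

	    # Under net.ipv4.ip_unprivileged_port_start=0 a non-root process can
	    # bind a port below 1024; otherwise bind() raises PermissionError.
	    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	    try:
	        s.bind(("0.0.0.0", 80))
	        print("bind on :80 succeeded, sysctl is in effect")
	    except PermissionError:
	        print("bind on :80 denied, sysctl is not in effect")
	    finally:
	        s.close()
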
	I0916 10:47:59.241906 1401996 command_runner.go:130] > # List of devices on the host that a
	I0916 10:47:59.241918 1401996 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 10:47:59.241927 1401996 command_runner.go:130] > # allowed_devices = [
	I0916 10:47:59.241931 1401996 command_runner.go:130] > # 	"/dev/fuse",
	I0916 10:47:59.241934 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241939 1401996 command_runner.go:130] > # List of additional devices, specified as
	I0916 10:47:59.241965 1401996 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 10:47:59.241971 1401996 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 10:47:59.241980 1401996 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 10:47:59.241985 1401996 command_runner.go:130] > # additional_devices = [
	I0916 10:47:59.241988 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241994 1401996 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 10:47:59.242001 1401996 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 10:47:59.242004 1401996 command_runner.go:130] > # 	"/etc/cdi",
	I0916 10:47:59.242009 1401996 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 10:47:59.242012 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.242018 1401996 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 10:47:59.242028 1401996 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking host's uid/gid.
	I0916 10:47:59.242032 1401996 command_runner.go:130] > # Defaults to false.
	I0916 10:47:59.242181 1401996 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 10:47:59.242203 1401996 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 10:47:59.242211 1401996 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 10:47:59.242214 1401996 command_runner.go:130] > # hooks_dir = [
	I0916 10:47:59.242218 1401996 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 10:47:59.242221 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.242234 1401996 command_runner.go:130] > # Path to the file specifying the defaults mounts for each container. The
	I0916 10:47:59.242245 1401996 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 10:47:59.242251 1401996 command_runner.go:130] > # its default mounts from the following two files:
	I0916 10:47:59.242254 1401996 command_runner.go:130] > #
	I0916 10:47:59.242260 1401996 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 10:47:59.242270 1401996 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 10:47:59.242276 1401996 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 10:47:59.242284 1401996 command_runner.go:130] > #
	I0916 10:47:59.242290 1401996 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 10:47:59.242297 1401996 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 10:47:59.242303 1401996 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 10:47:59.242308 1401996 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 10:47:59.242311 1401996 command_runner.go:130] > #
	I0916 10:47:59.242315 1401996 command_runner.go:130] > # default_mounts_file = ""
	I0916 10:47:59.242325 1401996 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 10:47:59.242332 1401996 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 10:47:59.242340 1401996 command_runner.go:130] > # pids_limit = 0
	I0916 10:47:59.242346 1401996 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 10:47:59.242352 1401996 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 10:47:59.242362 1401996 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 10:47:59.242370 1401996 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 10:47:59.242374 1401996 command_runner.go:130] > # log_size_max = -1
	I0916 10:47:59.242381 1401996 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 10:47:59.242496 1401996 command_runner.go:130] > # log_to_journald = false
	I0916 10:47:59.242510 1401996 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 10:47:59.242522 1401996 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 10:47:59.242538 1401996 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 10:47:59.242548 1401996 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 10:47:59.242554 1401996 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 10:47:59.242563 1401996 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 10:47:59.242568 1401996 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 10:47:59.242573 1401996 command_runner.go:130] > # read_only = false
	I0916 10:47:59.242587 1401996 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 10:47:59.242594 1401996 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 10:47:59.242598 1401996 command_runner.go:130] > # live configuration reload.
	I0916 10:47:59.242601 1401996 command_runner.go:130] > # log_level = "info"
	I0916 10:47:59.242607 1401996 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 10:47:59.242612 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.242620 1401996 command_runner.go:130] > # log_filter = ""
	I0916 10:47:59.242626 1401996 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 10:47:59.242632 1401996 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 10:47:59.242639 1401996 command_runner.go:130] > # separated by comma.
	I0916 10:47:59.242643 1401996 command_runner.go:130] > # uid_mappings = ""
	I0916 10:47:59.242649 1401996 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 10:47:59.242658 1401996 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 10:47:59.242662 1401996 command_runner.go:130] > # separated by comma.
	I0916 10:47:59.242794 1401996 command_runner.go:130] > # gid_mappings = ""
	I0916 10:47:59.242814 1401996 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 10:47:59.242822 1401996 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 10:47:59.242830 1401996 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 10:47:59.242838 1401996 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 10:47:59.242845 1401996 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 10:47:59.242851 1401996 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 10:47:59.242857 1401996 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 10:47:59.242861 1401996 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 10:47:59.242868 1401996 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 10:47:59.242884 1401996 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 10:47:59.242895 1401996 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 10:47:59.242899 1401996 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 10:47:59.242905 1401996 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 10:47:59.242914 1401996 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 10:47:59.242919 1401996 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 10:47:59.242924 1401996 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 10:47:59.242927 1401996 command_runner.go:130] > # drop_infra_ctr = true
	I0916 10:47:59.242933 1401996 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 10:47:59.242939 1401996 command_runner.go:130] > # You can use linux CPU list format to specify desired CPUs.
	I0916 10:47:59.242950 1401996 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 10:47:59.242954 1401996 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 10:47:59.242965 1401996 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 10:47:59.242972 1401996 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 10:47:59.243104 1401996 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 10:47:59.243144 1401996 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 10:47:59.243153 1401996 command_runner.go:130] > # pinns_path = ""
	I0916 10:47:59.243161 1401996 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 10:47:59.243172 1401996 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 10:47:59.243179 1401996 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 10:47:59.243183 1401996 command_runner.go:130] > # default_runtime = "runc"
	I0916 10:47:59.243188 1401996 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 10:47:59.243196 1401996 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior of being created as a directory).
	I0916 10:47:59.243211 1401996 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 10:47:59.243220 1401996 command_runner.go:130] > # creation as a file is not desired either.
	I0916 10:47:59.243230 1401996 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 10:47:59.243237 1401996 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 10:47:59.243248 1401996 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 10:47:59.243254 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.243261 1401996 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 10:47:59.243267 1401996 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 10:47:59.243274 1401996 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 10:47:59.243280 1401996 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 10:47:59.243286 1401996 command_runner.go:130] > #
	I0916 10:47:59.243291 1401996 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 10:47:59.243296 1401996 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 10:47:59.243304 1401996 command_runner.go:130] > #  runtime_type = "oci"
	I0916 10:47:59.243308 1401996 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 10:47:59.243313 1401996 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 10:47:59.243322 1401996 command_runner.go:130] > #  allowed_annotations = []
	I0916 10:47:59.243325 1401996 command_runner.go:130] > # Where:
	I0916 10:47:59.243331 1401996 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 10:47:59.243337 1401996 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 10:47:59.243344 1401996 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 10:47:59.243350 1401996 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 10:47:59.243354 1401996 command_runner.go:130] > #   in $PATH.
	I0916 10:47:59.243361 1401996 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 10:47:59.243369 1401996 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 10:47:59.243380 1401996 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 10:47:59.243387 1401996 command_runner.go:130] > #   state.
	I0916 10:47:59.243395 1401996 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 10:47:59.243406 1401996 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 10:47:59.243412 1401996 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 10:47:59.243418 1401996 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 10:47:59.243424 1401996 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 10:47:59.243431 1401996 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 10:47:59.243435 1401996 command_runner.go:130] > #   The currently recognized values are:
	I0916 10:47:59.243446 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 10:47:59.243453 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 10:47:59.243463 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 10:47:59.243471 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 10:47:59.243482 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 10:47:59.243489 1401996 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 10:47:59.243498 1401996 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 10:47:59.243505 1401996 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 10:47:59.243510 1401996 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 10:47:59.243514 1401996 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 10:47:59.243519 1401996 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 10:47:59.243523 1401996 command_runner.go:130] > runtime_type = "oci"
	I0916 10:47:59.243685 1401996 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 10:47:59.243698 1401996 command_runner.go:130] > runtime_config_path = ""
	I0916 10:47:59.243702 1401996 command_runner.go:130] > monitor_path = ""
	I0916 10:47:59.243705 1401996 command_runner.go:130] > monitor_cgroup = ""
	I0916 10:47:59.243709 1401996 command_runner.go:130] > monitor_exec_cgroup = ""
	I0916 10:47:59.243739 1401996 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 10:47:59.243748 1401996 command_runner.go:130] > # running containers
	I0916 10:47:59.243753 1401996 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 10:47:59.243760 1401996 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 10:47:59.243770 1401996 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 10:47:59.243776 1401996 command_runner.go:130] > # surface and mitigating the consequences of containers breakout.
	I0916 10:47:59.243781 1401996 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 10:47:59.243786 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 10:47:59.243790 1401996 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 10:47:59.243795 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 10:47:59.243800 1401996 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 10:47:59.243811 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 10:47:59.243817 1401996 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 10:47:59.243826 1401996 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 10:47:59.243837 1401996 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 10:47:59.243845 1401996 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and a set of resources it supports mutating.
	I0916 10:47:59.243857 1401996 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 10:47:59.243863 1401996 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 10:47:59.243874 1401996 command_runner.go:130] > # For a container to opt into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 10:47:59.243883 1401996 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 10:47:59.243893 1401996 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 10:47:59.243900 1401996 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 10:47:59.243908 1401996 command_runner.go:130] > # Example:
	I0916 10:47:59.243912 1401996 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 10:47:59.243918 1401996 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 10:47:59.243927 1401996 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 10:47:59.243932 1401996 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 10:47:59.243940 1401996 command_runner.go:130] > # cpuset = "0-1"
	I0916 10:47:59.243944 1401996 command_runner.go:130] > # cpushares = 0
	I0916 10:47:59.243947 1401996 command_runner.go:130] > # Where:
	I0916 10:47:59.243951 1401996 command_runner.go:130] > # The workload name is workload-type.
	I0916 10:47:59.243959 1401996 command_runner.go:130] > # To specify, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 10:47:59.243964 1401996 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 10:47:59.243971 1401996 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 10:47:59.243982 1401996 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 10:47:59.243989 1401996 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 10:47:59.243995 1401996 command_runner.go:130] > # 
	I0916 10:47:59.244001 1401996 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 10:47:59.244005 1401996 command_runner.go:130] > #
	I0916 10:47:59.244012 1401996 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 10:47:59.244022 1401996 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 10:47:59.244029 1401996 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 10:47:59.244035 1401996 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 10:47:59.244041 1401996 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 10:47:59.244049 1401996 command_runner.go:130] > [crio.image]
	I0916 10:47:59.244055 1401996 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 10:47:59.244059 1401996 command_runner.go:130] > # default_transport = "docker://"
	I0916 10:47:59.244070 1401996 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 10:47:59.244079 1401996 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 10:47:59.244084 1401996 command_runner.go:130] > # global_auth_file = ""
	I0916 10:47:59.244089 1401996 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 10:47:59.244098 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.244103 1401996 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 10:47:59.244109 1401996 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 10:47:59.244115 1401996 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 10:47:59.244127 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.244134 1401996 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 10:47:59.244141 1401996 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 10:47:59.244147 1401996 command_runner.go:130] > # When explicitly set to "", it will fall back to the entrypoint and command
	I0916 10:47:59.244157 1401996 command_runner.go:130] > # specified in the pause image. When commented out, it will fall back to the
	I0916 10:47:59.244163 1401996 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 10:47:59.244171 1401996 command_runner.go:130] > # pause_command = "/pause"
	I0916 10:47:59.244178 1401996 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 10:47:59.244191 1401996 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 10:47:59.244198 1401996 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 10:47:59.244205 1401996 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 10:47:59.244210 1401996 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 10:47:59.244344 1401996 command_runner.go:130] > # signature_policy = ""
	I0916 10:47:59.244374 1401996 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 10:47:59.244384 1401996 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 10:47:59.244394 1401996 command_runner.go:130] > # changing them here.
	I0916 10:47:59.244399 1401996 command_runner.go:130] > # insecure_registries = [
	I0916 10:47:59.244403 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.244409 1401996 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 10:47:59.244418 1401996 command_runner.go:130] > # ignore; the latter will ignore volumes entirely.
	I0916 10:47:59.244422 1401996 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 10:47:59.244428 1401996 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 10:47:59.244432 1401996 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 10:47:59.244439 1401996 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 10:47:59.244443 1401996 command_runner.go:130] > # CNI plugins.
	I0916 10:47:59.244447 1401996 command_runner.go:130] > [crio.network]
	I0916 10:47:59.244453 1401996 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 10:47:59.244458 1401996 command_runner.go:130] > # CRI-O will pick up the first one found in network_dir.
	I0916 10:47:59.244467 1401996 command_runner.go:130] > # cni_default_network = ""
	I0916 10:47:59.244474 1401996 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 10:47:59.244483 1401996 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 10:47:59.244493 1401996 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 10:47:59.244496 1401996 command_runner.go:130] > # plugin_dirs = [
	I0916 10:47:59.244500 1401996 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 10:47:59.244505 1401996 command_runner.go:130] > # ]
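
	When cni_default_network is left empty, CRI-O takes the first valid config file it finds in network_dir, sorted by file name; the kindnet recommendation further down (cni.go:143) works by dropping its config there. A rough sketch of that selection rule, using the default paths from the comments above:

	    from pathlib import Path

	    # CRI-O picks the lexicographically first CNI config in network_dir
	    # when cni_default_network is "" (extensions per libcni conventions).
	    netdir = Path("/etc/cni/net.d")
	    confs = sorted(p for p in netdir.iterdir()
	                   if p.suffix in (".conf", ".conflist", ".json"))
	    print("selected CNI config:", confs[0].name if confs else "<none>")
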
	I0916 10:47:59.244516 1401996 command_runner.go:130] > # A necessary configuration for Prometheus based metrics retrieval
	I0916 10:47:59.244523 1401996 command_runner.go:130] > [crio.metrics]
	I0916 10:47:59.244529 1401996 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 10:47:59.244533 1401996 command_runner.go:130] > # enable_metrics = false
	I0916 10:47:59.244538 1401996 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 10:47:59.244542 1401996 command_runner.go:130] > # Per default all metrics are enabled.
	I0916 10:47:59.244553 1401996 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 10:47:59.244560 1401996 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 10:47:59.244569 1401996 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 10:47:59.244747 1401996 command_runner.go:130] > # metrics_collectors = [
	I0916 10:47:59.244757 1401996 command_runner.go:130] > # 	"operations",
	I0916 10:47:59.244762 1401996 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 10:47:59.244766 1401996 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 10:47:59.244770 1401996 command_runner.go:130] > # 	"operations_errors",
	I0916 10:47:59.244775 1401996 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 10:47:59.244779 1401996 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 10:47:59.244793 1401996 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 10:47:59.244798 1401996 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 10:47:59.244801 1401996 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 10:47:59.244805 1401996 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 10:47:59.244809 1401996 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 10:47:59.244813 1401996 command_runner.go:130] > # 	"containers_oom_total",
	I0916 10:47:59.244819 1401996 command_runner.go:130] > # 	"containers_oom",
	I0916 10:47:59.244823 1401996 command_runner.go:130] > # 	"processes_defunct",
	I0916 10:47:59.244827 1401996 command_runner.go:130] > # 	"operations_total",
	I0916 10:47:59.244831 1401996 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 10:47:59.244835 1401996 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 10:47:59.244839 1401996 command_runner.go:130] > # 	"operations_errors_total",
	I0916 10:47:59.246637 1401996 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 10:47:59.246654 1401996 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 10:47:59.246658 1401996 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 10:47:59.246663 1401996 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 10:47:59.246667 1401996 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 10:47:59.246671 1401996 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 10:47:59.246674 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.246681 1401996 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 10:47:59.246685 1401996 command_runner.go:130] > # metrics_port = 9090
	I0916 10:47:59.246691 1401996 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 10:47:59.246702 1401996 command_runner.go:130] > # metrics_socket = ""
	I0916 10:47:59.246707 1401996 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 10:47:59.246714 1401996 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 10:47:59.246747 1401996 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 10:47:59.246756 1401996 command_runner.go:130] > # certificate on any modification event.
	I0916 10:47:59.246761 1401996 command_runner.go:130] > # metrics_cert = ""
	I0916 10:47:59.246766 1401996 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 10:47:59.246771 1401996 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 10:47:59.246941 1401996 command_runner.go:130] > # metrics_key = ""
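
	Metrics stay disabled in this run (enable_metrics = false), but if they were turned on, the collectors listed above would be served in Prometheus format on metrics_port. A hedged sketch of scraping them with the Python standard library, assuming metrics are enabled on localhost:9090:

	    import urllib.request

	    # Fetch the Prometheus-format metrics CRI-O serves when
	    # enable_metrics = true; 9090 matches metrics_port above.
	    with urllib.request.urlopen("http://127.0.0.1:9090/metrics", timeout=5) as resp:
	        for line in resp.read().decode().splitlines():
	            if line.startswith("container_runtime_crio_operations"):
	                print(line)
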
	I0916 10:47:59.246963 1401996 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 10:47:59.246968 1401996 command_runner.go:130] > [crio.tracing]
	I0916 10:47:59.246974 1401996 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 10:47:59.246978 1401996 command_runner.go:130] > # enable_tracing = false
	I0916 10:47:59.246983 1401996 command_runner.go:130] > # Address on which the gRPC trace collector listens.
	I0916 10:47:59.246987 1401996 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 10:47:59.246993 1401996 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 10:47:59.246998 1401996 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 10:47:59.247004 1401996 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 10:47:59.247008 1401996 command_runner.go:130] > [crio.stats]
	I0916 10:47:59.247014 1401996 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 10:47:59.247023 1401996 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 10:47:59.247027 1401996 command_runner.go:130] > # stats_collection_period = 0
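
	That ends the crio.conf dump minikube replays while provisioning the node. A small sanity-check sketch for the two values this profile actually overrides, cgroup_manager and pause_image (the config path is an assumption; tomllib requires Python 3.11+):

	    import tomllib

	    # Parse the rendered CRI-O configuration and verify the overrides.
	    with open("/etc/crio/crio.conf", "rb") as f:  # path assumed
	        conf = tomllib.load(f)

	    assert conf["crio"]["runtime"]["cgroup_manager"] == "cgroupfs"
	    assert conf["crio"]["image"]["pause_image"] == "registry.k8s.io/pause:3.10"
	    print("runc:", conf["crio"]["runtime"]["runtimes"]["runc"]["runtime_path"])
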
	I0916 10:47:59.247220 1401996 command_runner.go:130] ! time="2024-09-16 10:47:59.222448532Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 10:47:59.247241 1401996 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
	I0916 10:47:59.247293 1401996 cni.go:84] Creating CNI manager for ""
	I0916 10:47:59.247313 1401996 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:47:59.247323 1401996 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:47:59.247348 1401996 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-919910 NodeName:functional-919910 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:47:59.247499 1401996 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "functional-919910"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
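
	The four YAML documents above (InitConfiguration, ClusterConfiguration, KubeletConfiguration, KubeProxyConfiguration) are what lands in /var/tmp/minikube/kubeadm.yaml.new in the scp step just below. A quick validation sketch with PyYAML (a third-party package, assumed installed):

	    import yaml  # PyYAML

	    # The rendered kubeadm config is a four-document YAML stream.
	    with open("/var/tmp/minikube/kubeadm.yaml.new") as f:
	        docs = list(yaml.safe_load_all(f))

	    assert [d["kind"] for d in docs] == [
	        "InitConfiguration", "ClusterConfiguration",
	        "KubeletConfiguration", "KubeProxyConfiguration"]
	    assert docs[1]["controlPlaneEndpoint"] == "control-plane.minikube.internal:8441"
	    print("pod subnet:", docs[1]["networking"]["podSubnet"])
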
	I0916 10:47:59.247580 1401996 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:47:59.255418 1401996 command_runner.go:130] > kubeadm
	I0916 10:47:59.255435 1401996 command_runner.go:130] > kubectl
	I0916 10:47:59.255511 1401996 command_runner.go:130] > kubelet
	I0916 10:47:59.256537 1401996 binaries.go:44] Found k8s binaries, skipping transfer
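
	binaries.go:44 skips the transfer because all three binaries are already present under the versioned directory listed above. The decision reduces to a presence check along these lines (a sketch, not minikube's actual code):

	    from pathlib import Path

	    # Transfer is only needed if any expected binary is missing.
	    bindir = Path("/var/lib/minikube/binaries/v1.31.1")
	    missing = [n for n in ("kubeadm", "kubectl", "kubelet")
	               if not (bindir / n).is_file()]
	    print("transfer needed:", missing or "none, skipping")
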
	I0916 10:47:59.256602 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:47:59.265347 1401996 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (367 bytes)
	I0916 10:47:59.284157 1401996 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:47:59.302791 1401996 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2155 bytes)
	I0916 10:47:59.321595 1401996 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:47:59.325044 1401996 command_runner.go:130] > 192.168.49.2	control-plane.minikube.internal
	I0916 10:47:59.325415 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:47:59.441461 1401996 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:47:59.454512 1401996 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910 for IP: 192.168.49.2
	I0916 10:47:59.454535 1401996 certs.go:194] generating shared ca certs ...
	I0916 10:47:59.454551 1401996 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.454697 1401996 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:47:59.454744 1401996 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:47:59.454756 1401996 certs.go:256] generating profile certs ...
	I0916 10:47:59.454848 1401996 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key
	I0916 10:47:59.454922 1401996 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key.debd5ef9
	I0916 10:47:59.454972 1401996 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key
	I0916 10:47:59.454984 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:47:59.454999 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:47:59.455011 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:47:59.455026 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:47:59.455037 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:47:59.455054 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:47:59.455066 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:47:59.455081 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:47:59.455140 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:47:59.455172 1401996 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:47:59.455184 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:47:59.455210 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:47:59.455242 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:47:59.455268 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:47:59.455313 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:47:59.455344 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.455360 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.455373 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.456003 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:47:59.482482 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:47:59.508357 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:47:59.533138 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:47:59.558621 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:47:59.583104 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:47:59.607676 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:47:59.632251 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:47:59.656249 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:47:59.682129 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:47:59.707402 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:47:59.732945 1401996 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:47:59.751902 1401996 ssh_runner.go:195] Run: openssl version
	I0916 10:47:59.757177 1401996 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 10:47:59.757641 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:47:59.767361 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771009 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771054 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771108 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.779053 1401996 command_runner.go:130] > 3ec20f2e
	I0916 10:47:59.779521 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:47:59.788786 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:47:59.798081 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801713 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801753 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801812 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.808360 1401996 command_runner.go:130] > b5213941
	I0916 10:47:59.808860 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:47:59.818410 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:47:59.828438 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832025 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832064 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832115 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.838985 1401996 command_runner.go:130] > 51391683
	I0916 10:47:59.839071 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
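
	Each CA above is installed the way OpenSSL expects: hash the subject with "openssl x509 -hash -noout" and symlink <hash>.0 in /etc/ssl/certs so verification can find it. A sketch of the same step (needs root for the symlink; the openssl CLI is assumed on PATH):

	    import subprocess
	    from pathlib import Path

	    def install_ca(pem: str) -> None:
	        # The printed subject hash becomes the c_rehash-style link name.
	        h = subprocess.run(
	            ["openssl", "x509", "-hash", "-noout", "-in", pem],
	            capture_output=True, text=True, check=True).stdout.strip()
	        link = Path("/etc/ssl/certs") / f"{h}.0"
	        if not link.exists():
	            link.symlink_to(pem)

	    install_ca("/usr/share/ca-certificates/minikubeCA.pem")
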
	I0916 10:47:59.848212 1401996 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:47:59.851764 1401996 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:47:59.851830 1401996 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 10:47:59.851846 1401996 command_runner.go:130] > Device: 10301h/66305d	Inode: 1308756     Links: 1
	I0916 10:47:59.851853 1401996 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:59.851859 1401996 command_runner.go:130] > Access: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851865 1401996 command_runner.go:130] > Modify: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851869 1401996 command_runner.go:130] > Change: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851874 1401996 command_runner.go:130] >  Birth: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851952 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:47:59.858501 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.858963 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:47:59.865688 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.866098 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:47:59.872474 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.872964 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:47:59.879512 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.879952 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:47:59.886569 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.887097 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:47:59.893931 1401996 command_runner.go:130] > Certificate will not expire
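
	Every control-plane certificate passes "openssl x509 -checkend 86400", i.e. it will still be valid 24 hours from now. An equivalent sketch using the third-party cryptography package instead of the CLI (the package is an assumption; minikube itself shells out to openssl here):

	    from datetime import datetime, timedelta
	    from cryptography import x509

	    # Same predicate as "openssl x509 -checkend 86400".
	    def expires_within(pem_path: str, seconds: int = 86400) -> bool:
	        with open(pem_path, "rb") as f:
	            cert = x509.load_pem_x509_certificate(f.read())
	        # not_valid_after is a naive UTC datetime in this API.
	        return cert.not_valid_after < datetime.utcnow() + timedelta(seconds=seconds)

	    for p in ("/var/lib/minikube/certs/apiserver-kubelet-client.crt",
	              "/var/lib/minikube/certs/front-proxy-client.crt"):
	        print(p, "will expire" if expires_within(p) else "will not expire")
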
	I0916 10:47:59.894042 1401996 kubeadm.go:392] StartCluster: {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:59.894136 1401996 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:47:59.894206 1401996 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:47:59.928977 1401996 command_runner.go:130] > 89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e
	I0916 10:47:59.929057 1401996 command_runner.go:130] > 584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf
	I0916 10:47:59.929099 1401996 command_runner.go:130] > 9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5
	I0916 10:47:59.929123 1401996 command_runner.go:130] > 3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9
	I0916 10:47:59.929144 1401996 command_runner.go:130] > 6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49
	I0916 10:47:59.929179 1401996 command_runner.go:130] > 19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced
	I0916 10:47:59.929200 1401996 command_runner.go:130] > b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093
	I0916 10:47:59.929246 1401996 command_runner.go:130] > 790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75
	I0916 10:47:59.931789 1401996 cri.go:89] found id: "89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	I0916 10:47:59.931812 1401996 cri.go:89] found id: "584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	I0916 10:47:59.931818 1401996 cri.go:89] found id: "9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	I0916 10:47:59.931822 1401996 cri.go:89] found id: "3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	I0916 10:47:59.931825 1401996 cri.go:89] found id: "6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49"
	I0916 10:47:59.931829 1401996 cri.go:89] found id: "19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced"
	I0916 10:47:59.931833 1401996 cri.go:89] found id: "b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093"
	I0916 10:47:59.931836 1401996 cri.go:89] found id: "790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75"
	I0916 10:47:59.931839 1401996 cri.go:89] found id: ""
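
The eight hashes above are container IDs: `crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system` prints one ID per line for every kube-system container, and cri.go then records each as a "found id". A sketch of the same listing, run locally via os/exec rather than over SSH as minikube's ssh_runner does (local execution is an assumption for illustration):

// Sketch only: collect kube-system container IDs the way the log does.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	out, err := exec.Command("sudo", "crictl", "ps", "-a", "--quiet",
		"--label", "io.kubernetes.pod.namespace=kube-system").Output()
	if err != nil {
		panic(err)
	}
	// --quiet emits one 64-hex container ID per line.
	for _, id := range strings.Fields(string(out)) {
		fmt.Println("found id:", id)
	}
}
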
	I0916 10:47:59.931892 1401996 ssh_runner.go:195] Run: sudo runc list -f json
	I0916 10:47:59.955445 1401996 command_runner.go:130] > [{"ociVersion":"1.0.2-dev","id":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced/userdata","rootfs":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","created":"2024-09-16T10:46:54.451546712Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"d1900d79","io.kubernetes.container.name":"kube-controller-manager","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"d1900d79\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.conta
iner.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.388751196Z","io.kubernetes.cri-o.Image":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri-o.ImageRef":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-controller-manager\",\"io.kubernetes.pod.name\":\"kube-controller-manager-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"bcfd044776fa163108ac9ce9912dd1b1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-controller-manager-functional-919910_bcfd044776fa163108ac9ce9912dd1b1/kube-controller-manager/0.log","
io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-controller-manager\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","io.kubernetes.cri-o.Name":"k8s_kube-controller-manager_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae","io.kubernetes.cri-o.SandboxName":"k8s_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"
readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/containers/kube-controller-manager/2c567ce7\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/controller-manager.conf\",\"host_path\":\"/etc/kubernetes/controller-manager.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/min
ikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"host_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-controller-manager-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802316924Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay
-containers/3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9/userdata","rootfs":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","created":"2024-09-16T10:47:07.149526142Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"e80daca3","io.kubernetes.container.name":"kindnet-cni","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"e80daca3\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","io.kubernetes.cri-o.ContainerType":"container"
,"io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.08917364Z","io.kubernetes.cri-o.Image":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.ImageName":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri-o.ImageRef":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kindnet-cni\",\"io.kubernetes.pod.name\":\"kindnet-nb5xl\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"1282e172-7d16-4f24-9f7d-33da705832a9\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kindnet-nb5xl_1282e172-7d16-4f24-9f7d-33da705832a9/kindnet-cni/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kindnet-cni\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","io.kubernetes.cri-o.Name":"k8s_kindnet-cni_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.ku
bernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f","io.kubernetes.cri-o.SandboxName":"k8s_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path
\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/containers/kindnet-cni/4675b3f6\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/cni/net.d\",\"host_path\":\"/etc/cni/net.d\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/volumes/kubernetes.io~projected/kube-api-access-bxwpg\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kindnet-nb5xl","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"1282e172-7d16-4f24-9f7d-33da705832a9","kubernetes.io/config.seen":"2024-09-16T10:47:06.101213303Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf","pid":0,"stat
us":"stopped","bundle":"/run/containers/storage/overlay-containers/584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf/userdata","rootfs":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","created":"2024-09-16T10:47:48.333720036Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"6c6bf961","io.kubernetes.container.name":"storage-provisioner","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"6c6bf961\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad
35c776bd065faf","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.29216211Z","io.kubernetes.cri-o.Image":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.ImageName":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri-o.ImageRef":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"storage-provisioner\",\"io.kubernetes.pod.name\":\"storage-provisioner\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2eb6523f-f61a-4c33-8e91-0bbbb874554b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_storage-provisioner_2eb6523f-f61a-4c33-8e91-0bbbb874554b/storage-provisioner/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"storage-provisioner\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","io.kubernetes.cri-o
.Name":"k8s_storage-provisioner_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a","io.kubernetes.cri-o.SandboxName":"k8s_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/tmp\",\"host_path\":\"/tmp\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\"
:\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/containers/storage-provisioner/a9710de8\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/volumes/kubernetes.io~projected/kube-api-access-kn9qz\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"storage-provisioner","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2eb6523f-f61a-4c33-8e91-0bbbb874554b","kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Pod\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"integration-test\":\"storage-provisioner\"},\"name\":\"storage-provisioner\",\"namespace\":\"kube-system\"},\"spec\":{\"containers\":[{\"command\":[\"/storage-provisioner\"],\"image\":\"gcr.io/k8s-mi
nikube/storage-provisioner:v5\",\"imagePullPolicy\":\"IfNotPresent\",\"name\":\"storage-provisioner\",\"volumeMounts\":[{\"mountPath\":\"/tmp\",\"name\":\"tmp\"}]}],\"hostNetwork\":true,\"serviceAccountName\":\"storage-provisioner\",\"volumes\":[{\"hostPath\":{\"path\":\"/tmp\",\"type\":\"Directory\"},\"name\":\"tmp\"}]}}\n","kubernetes.io/config.seen":"2024-09-16T10:47:47.935314547Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49/userdata","rootfs":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","created":"2024-09-16T10:46:54.47745643Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"12faacf7","io.kubernetes.container.name":"kube-scheduler","io.kubernetes.container.restartC
ount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"12faacf7\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.420996546Z","io.kubernetes.cri-o.Image":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri-o.ImageRef":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-scheduler\",\"
io.kubernetes.pod.name\":\"kube-scheduler-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"60f2072c6865fb71ef7928175ceb3dad\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-scheduler-functional-919910_60f2072c6865fb71ef7928175ceb3dad/kube-scheduler/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-scheduler\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","io.kubernetes.cri-o.Name":"k8s_kube-scheduler_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3dad_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb","io.kubernetes.cri-o.SandboxName":"k8s_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef79281
75ceb3dad_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/containers/kube-scheduler/e278c329\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/scheduler.conf\",\"host_path\":\"/etc/kubernetes/scheduler.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-scheduler-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.hash":"60f2072c6865fb71e
f7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:46:53.802318072Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75/userdata","rootfs":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","created":"2024-09-16T10:46:54.415982086Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"7df2713b","io.kubernetes.container.name":"kube-apiserver","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"7df2713b\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMe
ssagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.357706151Z","io.kubernetes.cri-o.Image":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri-o.ImageRef":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-apiserver\",\"io.kubernetes.pod.name\":\"kube-apiserver-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3d8a6ba31c18f33c5660170029e5cde1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-apiserver-functional-919910_3d8a6ba31c18f33c5660170029e5cde1/kube-apise
rver/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-apiserver\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","io.kubernetes.cri-o.Name":"k8s_kube-apiserver_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1","io.kubernetes.cri-o.SandboxName":"k8s_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029
e5cde1/containers/kube-apiserver/e14b8c41\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\
":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-apiserver-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3d8a6ba31c18f33c5660170029e5cde1","kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802315340Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e/userdata","rootfs":"/var/lib/containers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","created":"2024-09-16T10:47:48.353491936Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"2a3a204d","io.kubernete
s.container.name":"coredns","io.kubernetes.container.ports":"[{\"name\":\"dns\",\"containerPort\":53,\"protocol\":\"UDP\"},{\"name\":\"dns-tcp\",\"containerPort\":53,\"protocol\":\"TCP\"},{\"name\":\"metrics\",\"containerPort\":9153,\"protocol\":\"TCP\"}]","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"2a3a204d\",\"io.kubernetes.container.ports\":\"[{\\\"name\\\":\\\"dns\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"UDP\\\"},{\\\"name\\\":\\\"dns-tcp\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"TCP\\\"},{\\\"name\\\":\\\"metrics\\\",\\\"containerPort\\\":9153,\\\"protocol\\\":\\\"TCP\\\"}]\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGra
cePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.313875121Z","io.kubernetes.cri-o.IP.0":"10.244.0.2","io.kubernetes.cri-o.Image":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.ImageName":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri-o.ImageRef":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"coredns\",\"io.kubernetes.pod.name\":\"coredns-7c65d6cfc9-qzn8c\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"ada36fb7-8486-4afc-9bef-04ab2e65fc7b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_coredns-7c65d6cfc9-qzn8c_ada36fb7-8486-4afc-9bef-04ab2e65fc7b/coredns/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"coredns\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/con
tainers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","io.kubernetes.cri-o.Name":"k8s_coredns_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b","io.kubernetes.cri-o.SandboxName":"k8s_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/coredns\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~configmap/config-volume\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"
/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/containers/coredns/4fbb99bf\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~projected/kube-api-access-lfgrj\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"coredns-7c65d6cfc9-qzn8c","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","kubernetes.io/config.seen":"2024-09-16T10:47:47.928368173Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"9fdab793eb970a5f01845e2aeaf
1389846fd7113bbdedbb122c9c796017271d5","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5/userdata","rootfs":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","created":"2024-09-16T10:47:07.30034468Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"159dcc59","io.kubernetes.container.name":"kube-proxy","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"159dcc59\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"9fdab793
eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.113935925Z","io.kubernetes.cri-o.Image":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri-o.ImageRef":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-proxy\",\"io.kubernetes.pod.name\":\"kube-proxy-nvpzv\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2e1bfc3e-dea3-4511-a154-e367e28b0898\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-proxy-nvpzv_2e1bfc3e-dea3-4511-a154-e367e28b0898/kube-proxy/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-proxy\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","io.kubernetes.cri
-o.Name":"k8s_kube-proxy_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49","io.kubernetes.cri-o.SandboxName":"k8s_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e36
7e28b0898/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/containers/kube-proxy/8c6823e0\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/kube-proxy\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~configmap/kube-proxy\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~projected/kube-api-access-4b6t8\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-proxy-nvpzv","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","kubernetes.io/config.seen":"
2024-09-16T10:47:06.101265018Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093/userdata","rootfs":"/var/lib/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","created":"2024-09-16T10:46:54.468809185Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"cdf7d3fa","io.kubernetes.container.name":"etcd","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"cdf7d3fa\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.cont
ainer.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.371518695Z","io.kubernetes.cri-o.Image":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.ImageName":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri-o.ImageRef":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"etcd\",\"io.kubernetes.pod.name\":\"etcd-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3e910b182a705a484fdc6733177892d1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_etcd-functional-919910_3e910b182a705a484fdc6733177892d1/etcd/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"etcd\"}","io.kubernetes.cri-o.MountPoint":"/var/li
b/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","io.kubernetes.cri-o.Name":"k8s_etcd_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee","io.kubernetes.cri-o.SandboxName":"k8s_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3e910b182a705a484fdc6733177892d1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kub
elet/pods/3e910b182a705a484fdc6733177892d1/containers/etcd/840357dc\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/etcd\",\"host_path\":\"/var/lib/minikube/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs/etcd\",\"host_path\":\"/var/lib/minikube/certs/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"etcd-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3e910b182a705a484fdc6733177892d1","kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"owner":"root"}]
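
The payload above is the JSON form of `runc list`: an array of container-state objects with id, pid, status, bundle, rootfs, created, annotations, and owner, where the CRI-O annotations carry the Kubernetes pod and container names. A sketch that decodes it into a small assumed subset of those fields; the field names follow the keys visible in the log, but this is not runc's complete state type:

// Sketch only: decode `sudo runc list -f json` into a partial struct.
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

type runcContainer struct {
	ID          string            `json:"id"`
	Pid         int               `json:"pid"`
	Status      string            `json:"status"`
	Bundle      string            `json:"bundle"`
	Created     string            `json:"created"`
	Annotations map[string]string `json:"annotations"`
}

func main() {
	out, err := exec.Command("sudo", "runc", "list", "-f", "json").Output()
	if err != nil {
		panic(err)
	}
	var containers []runcContainer
	if err := json.Unmarshal(out, &containers); err != nil {
		panic(err)
	}
	for _, c := range containers {
		// io.kubernetes.container.name is set by CRI-O, as seen in the log.
		fmt.Printf("%s %s %s\n", c.ID, c.Status,
			c.Annotations["io.kubernetes.container.name"])
	}
}
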
	I0916 10:47:59.955530 1401996 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced/userdata","rootfs":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","created":"2024-09-16T10:46:54.451546712Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"d1900d79","io.kubernetes.container.name":"kube-controller-manager","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"d1900d79\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.t
erminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.388751196Z","io.kubernetes.cri-o.Image":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri-o.ImageRef":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-controller-manager\",\"io.kubernetes.pod.name\":\"kube-controller-manager-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"bcfd044776fa163108ac9ce9912dd1b1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-controller-manager-functional-919910_bcfd044776fa163108ac9ce9912dd1b1/kube-controller-manager/0.log","io.kub
ernetes.cri-o.Metadata":"{\"name\":\"kube-controller-manager\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","io.kubernetes.cri-o.Name":"k8s_kube-controller-manager_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae","io.kubernetes.cri-o.SandboxName":"k8s_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readon
ly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/containers/kube-controller-manager/2c567ce7\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/controller-manager.conf\",\"host_path\":\"/etc/kubernetes/controller-manager.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/
certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"host_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-controller-manager-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802316924Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-conta
iners/3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9/userdata","rootfs":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","created":"2024-09-16T10:47:07.149526142Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"e80daca3","io.kubernetes.container.name":"kindnet-cni","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"e80daca3\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","io.kubernetes.cri-o.ContainerType":"container","io.k
ubernetes.cri-o.Created":"2024-09-16T10:47:07.08917364Z","io.kubernetes.cri-o.Image":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.ImageName":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri-o.ImageRef":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kindnet-cni\",\"io.kubernetes.pod.name\":\"kindnet-nb5xl\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"1282e172-7d16-4f24-9f7d-33da705832a9\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kindnet-nb5xl_1282e172-7d16-4f24-9f7d-33da705832a9/kindnet-cni/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kindnet-cni\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","io.kubernetes.cri-o.Name":"k8s_kindnet-cni_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernet
es.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f","io.kubernetes.cri-o.SandboxName":"k8s_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/
dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/containers/kindnet-cni/4675b3f6\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/cni/net.d\",\"host_path\":\"/etc/cni/net.d\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/volumes/kubernetes.io~projected/kube-api-access-bxwpg\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kindnet-nb5xl","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"1282e172-7d16-4f24-9f7d-33da705832a9","kubernetes.io/config.seen":"2024-09-16T10:47:06.101213303Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf","pid":0,"status":"s
topped","bundle":"/run/containers/storage/overlay-containers/584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf/userdata","rootfs":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","created":"2024-09-16T10:47:48.333720036Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"6c6bf961","io.kubernetes.container.name":"storage-provisioner","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"6c6bf961\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776
bd065faf","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.29216211Z","io.kubernetes.cri-o.Image":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.ImageName":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri-o.ImageRef":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"storage-provisioner\",\"io.kubernetes.pod.name\":\"storage-provisioner\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2eb6523f-f61a-4c33-8e91-0bbbb874554b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_storage-provisioner_2eb6523f-f61a-4c33-8e91-0bbbb874554b/storage-provisioner/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"storage-provisioner\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","io.kubernetes.cri-o.Name"
:"k8s_storage-provisioner_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a","io.kubernetes.cri-o.SandboxName":"k8s_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/tmp\",\"host_path\":\"/tmp\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/va
r/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/containers/storage-provisioner/a9710de8\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/volumes/kubernetes.io~projected/kube-api-access-kn9qz\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"storage-provisioner","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2eb6523f-f61a-4c33-8e91-0bbbb874554b","kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Pod\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"integration-test\":\"storage-provisioner\"},\"name\":\"storage-provisioner\",\"namespace\":\"kube-system\"},\"spec\":{\"containers\":[{\"command\":[\"/storage-provisioner\"],\"image\":\"gcr.io/k8s-minikube
/storage-provisioner:v5\",\"imagePullPolicy\":\"IfNotPresent\",\"name\":\"storage-provisioner\",\"volumeMounts\":[{\"mountPath\":\"/tmp\",\"name\":\"tmp\"}]}],\"hostNetwork\":true,\"serviceAccountName\":\"storage-provisioner\",\"volumes\":[{\"hostPath\":{\"path\":\"/tmp\",\"type\":\"Directory\"},\"name\":\"tmp\"}]}}\n","kubernetes.io/config.seen":"2024-09-16T10:47:47.935314547Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49/userdata","rootfs":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","created":"2024-09-16T10:46:54.47745643Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"12faacf7","io.kubernetes.container.name":"kube-scheduler","io.kubernetes.container.restartCount":
"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"12faacf7\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.420996546Z","io.kubernetes.cri-o.Image":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri-o.ImageRef":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-scheduler\",\"io.kub
ernetes.pod.name\":\"kube-scheduler-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"60f2072c6865fb71ef7928175ceb3dad\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-scheduler-functional-919910_60f2072c6865fb71ef7928175ceb3dad/kube-scheduler/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-scheduler\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","io.kubernetes.cri-o.Name":"k8s_kube-scheduler_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3dad_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb","io.kubernetes.cri-o.SandboxName":"k8s_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3
dad_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/containers/kube-scheduler/e278c329\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/scheduler.conf\",\"host_path\":\"/etc/kubernetes/scheduler.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-scheduler-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.hash":"60f2072c6865fb71ef79281
75ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:46:53.802318072Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75/userdata","rootfs":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","created":"2024-09-16T10:46:54.415982086Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"7df2713b","io.kubernetes.container.name":"kube-apiserver","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"7df2713b\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessageP
ath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.357706151Z","io.kubernetes.cri-o.Image":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri-o.ImageRef":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-apiserver\",\"io.kubernetes.pod.name\":\"kube-apiserver-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3d8a6ba31c18f33c5660170029e5cde1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-apiserver-functional-919910_3d8a6ba31c18f33c5660170029e5cde1/kube-apiserver/0
.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-apiserver\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","io.kubernetes.cri-o.Name":"k8s_kube-apiserver_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1","io.kubernetes.cri-o.SandboxName":"k8s_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1
/containers/kube-apiserver/e14b8c41\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"
selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-apiserver-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3d8a6ba31c18f33c5660170029e5cde1","kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802315340Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e/userdata","rootfs":"/var/lib/containers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","created":"2024-09-16T10:47:48.353491936Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"2a3a204d","io.kubernetes.cont
ainer.name":"coredns","io.kubernetes.container.ports":"[{\"name\":\"dns\",\"containerPort\":53,\"protocol\":\"UDP\"},{\"name\":\"dns-tcp\",\"containerPort\":53,\"protocol\":\"TCP\"},{\"name\":\"metrics\",\"containerPort\":9153,\"protocol\":\"TCP\"}]","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"2a3a204d\",\"io.kubernetes.container.ports\":\"[{\\\"name\\\":\\\"dns\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"UDP\\\"},{\\\"name\\\":\\\"dns-tcp\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"TCP\\\"},{\\\"name\\\":\\\"metrics\\\",\\\"containerPort\\\":9153,\\\"protocol\\\":\\\"TCP\\\"}]\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeri
od\":\"30\"}","io.kubernetes.cri-o.ContainerID":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.313875121Z","io.kubernetes.cri-o.IP.0":"10.244.0.2","io.kubernetes.cri-o.Image":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.ImageName":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri-o.ImageRef":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"coredns\",\"io.kubernetes.pod.name\":\"coredns-7c65d6cfc9-qzn8c\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"ada36fb7-8486-4afc-9bef-04ab2e65fc7b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_coredns-7c65d6cfc9-qzn8c_ada36fb7-8486-4afc-9bef-04ab2e65fc7b/coredns/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"coredns\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/container
s/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","io.kubernetes.cri-o.Name":"k8s_coredns_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b","io.kubernetes.cri-o.SandboxName":"k8s_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/coredns\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~configmap/config-volume\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/h
osts\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/containers/coredns/4fbb99bf\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~projected/kube-api-access-lfgrj\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"coredns-7c65d6cfc9-qzn8c","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","kubernetes.io/config.seen":"2024-09-16T10:47:47.928368173Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"9fdab793eb970a5f01845e2aeaf138984
6fd7113bbdedbb122c9c796017271d5","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5/userdata","rootfs":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","created":"2024-09-16T10:47:07.30034468Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"159dcc59","io.kubernetes.container.name":"kube-proxy","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"159dcc59\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"9fdab793eb970a
5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.113935925Z","io.kubernetes.cri-o.Image":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri-o.ImageRef":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-proxy\",\"io.kubernetes.pod.name\":\"kube-proxy-nvpzv\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2e1bfc3e-dea3-4511-a154-e367e28b0898\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-proxy-nvpzv_2e1bfc3e-dea3-4511-a154-e367e28b0898/kube-proxy/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-proxy\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","io.kubernetes.cri-o.Nam
e":"k8s_kube-proxy_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49","io.kubernetes.cri-o.SandboxName":"k8s_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0
898/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/containers/kube-proxy/8c6823e0\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/kube-proxy\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~configmap/kube-proxy\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~projected/kube-api-access-4b6t8\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-proxy-nvpzv","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","kubernetes.io/config.seen":"2024-0
9-16T10:47:06.101265018Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093/userdata","rootfs":"/var/lib/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","created":"2024-09-16T10:46:54.468809185Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"cdf7d3fa","io.kubernetes.container.name":"etcd","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"cdf7d3fa\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.
terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.371518695Z","io.kubernetes.cri-o.Image":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.ImageName":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri-o.ImageRef":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"etcd\",\"io.kubernetes.pod.name\":\"etcd-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3e910b182a705a484fdc6733177892d1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_etcd-functional-919910_3e910b182a705a484fdc6733177892d1/etcd/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"etcd\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/cont
ainers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","io.kubernetes.cri-o.Name":"k8s_etcd_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee","io.kubernetes.cri-o.SandboxName":"k8s_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3e910b182a705a484fdc6733177892d1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/p
ods/3e910b182a705a484fdc6733177892d1/containers/etcd/840357dc\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/etcd\",\"host_path\":\"/var/lib/minikube/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs/etcd\",\"host_path\":\"/var/lib/minikube/certs/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"etcd-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3e910b182a705a484fdc6733177892d1","kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"owner":"root"}]
	I0916 10:47:59.956282 1401996 cri.go:126] list returned 8 containers
	I0916 10:47:59.956312 1401996 cri.go:129] container: {ID:19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced Status:stopped}
	I0916 10:47:59.956327 1401996 cri.go:135] skipping {19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956337 1401996 cri.go:129] container: {ID:3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9 Status:stopped}
	I0916 10:47:59.956343 1401996 cri.go:135] skipping {3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956348 1401996 cri.go:129] container: {ID:584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf Status:stopped}
	I0916 10:47:59.956354 1401996 cri.go:135] skipping {584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956363 1401996 cri.go:129] container: {ID:6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49 Status:stopped}
	I0916 10:47:59.956368 1401996 cri.go:135] skipping {6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956374 1401996 cri.go:129] container: {ID:790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75 Status:stopped}
	I0916 10:47:59.956382 1401996 cri.go:135] skipping {790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956397 1401996 cri.go:129] container: {ID:89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e Status:stopped}
	I0916 10:47:59.956403 1401996 cri.go:135] skipping {89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956409 1401996 cri.go:129] container: {ID:9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5 Status:stopped}
	I0916 10:47:59.956414 1401996 cri.go:135] skipping {9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956420 1401996 cri.go:129] container: {ID:b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093 Status:stopped}
	I0916 10:47:59.956425 1401996 cri.go:135] skipping {b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093 stopped}: state = "stopped", want "paused"
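For context on the cri.go lines above: minikube lists every CRI-O container and keeps only those in a wanted state; here all eight are "stopped" while the code wants "paused", so each one is skipped. A minimal Go sketch of that filter (illustrative only, not minikube's actual cri.go):

package main

import "fmt"

// container mirrors the {ID, Status} pairs printed by the cri.go lines above.
type container struct {
	ID     string
	Status string
}

// filterByState keeps only containers in the wanted state; with want="paused"
// and every container "stopped", everything is skipped, as the log shows.
func filterByState(cs []container, want string) []container {
	var out []container
	for _, c := range cs {
		if c.Status != want {
			fmt.Printf("skipping {%s %s}: state = %q, want %q\n", c.ID, c.Status, c.Status, want)
			continue
		}
		out = append(out, c)
	}
	return out
}

func main() {
	// IDs shortened from the log purely for readability.
	listed := []container{
		{ID: "19cb8b26283b", Status: "stopped"},
		{ID: "790d8c6b7f5c", Status: "stopped"},
	}
	fmt.Println("matched:", filterByState(listed, "paused"))
}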
	I0916 10:47:59.956500 1401996 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:47:59.965165 1401996 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 10:47:59.965185 1401996 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 10:47:59.965192 1401996 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 10:47:59.965196 1401996 command_runner.go:130] > member
	I0916 10:47:59.966329 1401996 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:47:59.966344 1401996 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:47:59.966415 1401996 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:47:59.975219 1401996 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:47:59.975763 1401996 kubeconfig.go:125] found "functional-919910" server: "https://192.168.49.2:8441"
	I0916 10:47:59.976267 1401996 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:59.976523 1401996 kapi.go:59] client config for functional-919910: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:47:59.977304 1401996 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:47:59.977398 1401996 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:47:59.986232 1401996 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:47:59.986266 1401996 kubeadm.go:597] duration metric: took 19.915756ms to restartPrimaryControlPlane
	I0916 10:47:59.986276 1401996 kubeadm.go:394] duration metric: took 92.240124ms to StartCluster
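The kubeadm.go lines above show the restart decision: because /var/lib/kubelet/kubeadm-flags.env, /var/lib/kubelet/config.yaml and /var/lib/minikube/etcd all exist, minikube attempts a cluster restart rather than a fresh kubeadm init. A sketch of that probe, assuming it reduces to a file-existence check (the real code runs "sudo ls" over SSH, as logged):

package main

import (
	"fmt"
	"os"
)

// existingClusterState reports whether any prior cluster state survives.
// Paths are taken verbatim from the ssh_runner log line above.
func existingClusterState() bool {
	paths := []string{
		"/var/lib/kubelet/kubeadm-flags.env",
		"/var/lib/kubelet/config.yaml",
		"/var/lib/minikube/etcd",
	}
	for _, p := range paths {
		if _, err := os.Stat(p); err == nil {
			return true
		}
	}
	return false
}

func main() {
	if existingClusterState() {
		fmt.Println("found existing configuration files, will attempt cluster restart")
	} else {
		fmt.Println("no prior state, running kubeadm init")
	}
}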
	I0916 10:47:59.986317 1401996 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.986408 1401996 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:59.987095 1401996 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.987344 1401996 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:47:59.987701 1401996 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:47:59.987810 1401996 addons.go:69] Setting storage-provisioner=true in profile "functional-919910"
	I0916 10:47:59.987840 1401996 addons.go:234] Setting addon storage-provisioner=true in "functional-919910"
	W0916 10:47:59.987862 1401996 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:47:59.987901 1401996 host.go:66] Checking if "functional-919910" exists ...
	I0916 10:47:59.987966 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:59.988024 1401996 addons.go:69] Setting default-storageclass=true in profile "functional-919910"
	I0916 10:47:59.988041 1401996 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-919910"
	I0916 10:47:59.988320 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:47:59.988449 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:47:59.993883 1401996 out.go:177] * Verifying Kubernetes components...
	I0916 10:47:59.996540 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:00.012508 1401996 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:48:00.012872 1401996 kapi.go:59] client config for functional-919910: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:00.013941 1401996 addons.go:234] Setting addon default-storageclass=true in "functional-919910"
	W0916 10:48:00.013978 1401996 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:00.014011 1401996 host.go:66] Checking if "functional-919910" exists ...
	I0916 10:48:00.015576 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:48:00.028749 1401996 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:00.032397 1401996 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.032435 1401996 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:00.032514 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:00.057250 1401996 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.057279 1401996 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:00.057353 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:00.088811 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:00.142080 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:00.384195 1401996 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:00.384326 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.426788 1401996 node_ready.go:35] waiting up to 6m0s for node "functional-919910" to be "Ready" ...
	I0916 10:48:00.427006 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:00.427037 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:00.427063 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:00.427084 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:00.428469 1401996 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 10:48:00.428543 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:00.433927 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.534904 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:00.535009 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.535069 1401996 retry.go:31] will retry after 308.565003ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.575462 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:00.575589 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.575652 1401996 retry.go:31] will retry after 310.710217ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
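Both kubectl applies above fail with "connection refused" because the apiserver on port 8441 is still coming back up, so minikube schedules retries with short randomized delays (308ms, 310ms, ...) and switches to "apply --force" below. A minimal retry loop in that spirit (assumed shape, not minikube's retry.go; it shells out to kubectl directly):

package main

import (
	"fmt"
	"math/rand"
	"os/exec"
	"time"
)

// applyWithRetry re-runs "kubectl apply --force" until it succeeds or
// attempts run out, sleeping a randomized interval between tries to
// mirror the "will retry after 308.565003ms"-style log lines.
func applyWithRetry(manifest string, attempts int) error {
	var err error
	for i := 0; i < attempts; i++ {
		cmd := exec.Command("kubectl", "apply", "--force", "-f", manifest)
		if out, e := cmd.CombinedOutput(); e == nil {
			return nil
		} else {
			err = fmt.Errorf("%v: %s", e, out)
		}
		time.Sleep(time.Duration(200+rand.Intn(300)) * time.Millisecond)
	}
	return err
}

func main() {
	if err := applyWithRetry("/etc/kubernetes/addons/storage-provisioner.yaml", 5); err != nil {
		fmt.Println("apply failed:", err)
	}
}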
	I0916 10:48:00.843924 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.887536 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.927006 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:00.927080 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:00.927116 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:00.927152 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:00.927470 1401996 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 10:48:00.927519 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:01.062205 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:01.062298 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.062334 1401996 retry.go:31] will retry after 200.404538ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.111099 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:01.111213 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.111266 1401996 retry.go:31] will retry after 431.025884ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.263423 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:01.428245 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:01.428324 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:01.428359 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:01.428379 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:01.542998 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:05.268811 1401996 round_trippers.go:574] Response Status: 200 OK in 3840 milliseconds
	I0916 10:48:05.268845 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.268865 1401996 round_trippers.go:580]     Audit-Id: b20755fb-7b68-4485-a8ac-3c8c03c63fcc
	I0916 10:48:05.268869 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.268873 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.268877 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 10:48:05.268880 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 10:48:05.268883 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.290959 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.291832 1401996 node_ready.go:49] node "functional-919910" has status "Ready":"True"
	I0916 10:48:05.291863 1401996 node_ready.go:38] duration metric: took 4.864992104s for node "functional-919910" to be "Ready" ...
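The round_trippers entries are client-go's HTTP trace for this wait: minikube repeatedly GETs /api/v1/nodes/functional-919910 until the node's Ready condition is True, which here takes ~4.86s while the apiserver finishes restarting. Roughly equivalent client-go code (a sketch assuming a standard kubeconfig, not minikube's node_ready.go):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// nodeReady fetches the node and reports whether its Ready condition is True.
func nodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) (bool, error) {
	node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
	if err != nil {
		return false, err // e.g. "connection refused" while the apiserver restarts
	}
	for _, c := range node.Status.Conditions {
		if c.Type == corev1.NodeReady {
			return c.Status == corev1.ConditionTrue, nil
		}
	}
	return false, nil
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	for {
		if ok, _ := nodeReady(context.Background(), cs, "functional-919910"); ok {
			fmt.Println("node is Ready")
			return
		}
		time.Sleep(500 * time.Millisecond)
	}
}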
	I0916 10:48:05.291873 1401996 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:05.291919 1401996 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:48:05.291929 1401996 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:48:05.291997 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.292001 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.292009 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.292013 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.403335 1401996 round_trippers.go:574] Response Status: 200 OK in 111 milliseconds
	I0916 10:48:05.403355 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.403364 1401996 round_trippers.go:580]     Audit-Id: 7d422454-4258-4b85-a1cc-37a8b98d571f
	I0916 10:48:05.403368 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.403371 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.403374 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 10:48:05.403376 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 10:48:05.403379 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.405776 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"427"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"417","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59464 chars]
	I0916 10:48:05.410422 1401996 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.410574 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-qzn8c
	I0916 10:48:05.410601 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.410640 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.410663 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.457641 1401996 round_trippers.go:574] Response Status: 200 OK in 46 milliseconds
	I0916 10:48:05.457719 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.457741 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.457760 1401996 round_trippers.go:580]     Audit-Id: 16dfbd36-f6d0-42fb-b5ed-9209469f4674
	I0916 10:48:05.457792 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.457813 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.457833 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.457850 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.462154 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"417","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6814 chars]
	I0916 10:48:05.462872 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.462919 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.462944 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.462963 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.507563 1401996 round_trippers.go:574] Response Status: 200 OK in 44 milliseconds
	I0916 10:48:05.507637 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.507659 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.507679 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.507717 1401996 round_trippers.go:580]     Audit-Id: 2ac7f2e0-df41-4a25-a5a6-013d4b6ff9b5
	I0916 10:48:05.507739 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.507758 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.507778 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.508058 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.508540 1401996 pod_ready.go:93] pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.508585 1401996 pod_ready.go:82] duration metric: took 98.105074ms for pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.508618 1401996 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.508752 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910
	I0916 10:48:05.508779 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.508800 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.508835 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.523151 1401996 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:48:05.523176 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.523186 1401996 round_trippers.go:580]     Audit-Id: 6019da01-d618-4959-b44c-39894e5dbd68
	I0916 10:48:05.523191 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.523197 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.523206 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.523210 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.523214 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.527083 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-functional-919910","namespace":"kube-system","uid":"73472289-b523-4c96-8d5d-33ea5c657902","resourceVersion":"385","creationTimestamp":"2024-09-16T10:47:00Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.mirror":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:00Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-
client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/confi [truncated 6440 chars]
	I0916 10:48:05.527724 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.527771 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.527793 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.527814 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.554600 1401996 round_trippers.go:574] Response Status: 200 OK in 26 milliseconds
	I0916 10:48:05.554677 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.554700 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.554721 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.554752 1401996 round_trippers.go:580]     Audit-Id: 64260f4c-bc78-4580-ae77-d48120f6be4a
	I0916 10:48:05.554774 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.554791 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.554809 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.561247 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.561811 1401996 pod_ready.go:93] pod "etcd-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.561852 1401996 pod_ready.go:82] duration metric: took 53.213628ms for pod "etcd-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.561889 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.562011 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910
	I0916 10:48:05.562035 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.562059 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.562093 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.574539 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.574615 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.574645 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.574693 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.574732 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.574750 1401996 round_trippers.go:580]     Audit-Id: 9a7034d0-d767-43fe-a880-66e341250069
	I0916 10:48:05.574796 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.574816 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.575573 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-functional-919910","namespace":"kube-system","uid":"82da7bbe-1484-402c-b1a5-7165f1938703","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.mirror":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:47:01.310178039Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.ku
bernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes [truncated 8516 chars]
	I0916 10:48:05.576295 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.576351 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.576390 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.576409 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.597433 1401996 round_trippers.go:574] Response Status: 200 OK in 20 milliseconds
	I0916 10:48:05.597508 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.597531 1401996 round_trippers.go:580]     Audit-Id: 6589c107-019c-408e-845e-a53b672e2cc8
	I0916 10:48:05.597551 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.597583 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.597604 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.597623 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.597641 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.598277 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.598798 1401996 pod_ready.go:93] pod "kube-apiserver-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.598847 1401996 pod_ready.go:82] duration metric: took 36.937901ms for pod "kube-apiserver-functional-919910" in "kube-system" namespace to be "Ready" ...
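The same pattern repeats for each control-plane pod: fetch the pod, inspect its conditions, and count it Ready only when the PodReady condition is True. The condition check itself reduces to a few lines (helper name illustrative, not minikube's pod_ready.go):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// podReady mirrors the pod_ready.go checks in the log: a pod counts as
// Ready when its PodReady condition reports True.
func podReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	pod := &corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{
		{Type: corev1.PodReady, Status: corev1.ConditionTrue},
	}}}
	fmt.Println("ready:", podReady(pod))
}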
	I0916 10:48:05.598873 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.598980 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910
	I0916 10:48:05.599012 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.599033 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.599051 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.602884 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:05.602953 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.602974 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.602992 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.603025 1401996 round_trippers.go:580]     Audit-Id: 58c20f1e-d8c3-47e5-85ef-28bea0245620
	I0916 10:48:05.603047 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.603064 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.603082 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.610241 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-functional-919910","namespace":"kube-system","uid":"483b3e2c-288a-41e1-a29b-33a95b5b536a","resourceVersion":"389","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.mirror":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:47:01.310179278Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes
.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{"." [truncated 8091 chars]
	I0916 10:48:05.610955 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.611001 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.611024 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.611040 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.623206 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.623278 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.623302 1401996 round_trippers.go:580]     Audit-Id: eba365c8-7916-4bed-afc6-3ac57c8778c6
	I0916 10:48:05.623318 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.623351 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.623372 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.623387 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.623404 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.625918 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.626484 1401996 pod_ready.go:93] pod "kube-controller-manager-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.626528 1401996 pod_ready.go:82] duration metric: took 27.634829ms for pod "kube-controller-manager-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.626554 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-nvpzv" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.626649 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv
	I0916 10:48:05.626682 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.626704 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.626724 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.639523 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.639598 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.639619 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.639635 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.639654 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.639687 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.639703 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.639721 1401996 round_trippers.go:580]     Audit-Id: 2b6ebbcf-d263-4d6c-b097-8c0ab6e63f49
	I0916 10:48:05.642765 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-nvpzv","generateName":"kube-proxy-","namespace":"kube-system","uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","resourceVersion":"357","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"e471ede7-5b70-4fcb-8bb8-8ab058b1f83f","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"e471ede7-5b70-4fcb-8bb8-8ab058b1f83f\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6172 chars]
	I0916 10:48:05.643413 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.643461 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.643484 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.643505 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.652916 1401996 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:48:05.652989 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.653011 1401996 round_trippers.go:580]     Audit-Id: 39e2f15b-9376-434d-a0fd-05e78846f93a
	I0916 10:48:05.653031 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.653124 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.653146 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.653163 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.653179 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.654529 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.655026 1401996 pod_ready.go:93] pod "kube-proxy-nvpzv" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.655070 1401996 pod_ready.go:82] duration metric: took 28.496567ms for pod "kube-proxy-nvpzv" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.655097 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.692423 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:05.692494 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.692526 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.692546 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.699783 1401996 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:48:05.699856 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.699889 1401996 round_trippers.go:580]     Audit-Id: 6a583f7c-1627-40c3-9614-00b54d361799
	I0916 10:48:05.699908 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.699937 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.699957 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.699976 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.699993 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.709127 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"430","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5337 chars]
	I0916 10:48:05.892560 1401996 request.go:632] Waited for 182.854067ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.892662 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.892704 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.892736 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.892756 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.921649 1401996 round_trippers.go:574] Response Status: 200 OK in 28 milliseconds
	I0916 10:48:05.921736 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.921759 1401996 round_trippers.go:580]     Audit-Id: a55d5629-0c4b-4e58-b9f3-f01a607ebccc
	I0916 10:48:05.921818 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.921840 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.921858 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.921885 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.921908 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.922118 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:06.155797 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:06.155894 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.155918 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.155940 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.178184 1401996 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:48:06.178261 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.178285 1401996 round_trippers.go:580]     Audit-Id: 651630df-1444-437f-9e5e-0bd74d71db58
	I0916 10:48:06.178304 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.178336 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.178359 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.178379 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.178396 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.195434 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:06.292928 1401996 request.go:632] Waited for 96.929572ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.293057 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.293082 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.293117 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.293141 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.294792 1401996 command_runner.go:130] > serviceaccount/storage-provisioner unchanged
	I0916 10:48:06.294862 1401996 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner unchanged
	I0916 10:48:06.294886 1401996 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:06.294907 1401996 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:06.294941 1401996 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath unchanged
	I0916 10:48:06.294965 1401996 command_runner.go:130] > pod/storage-provisioner configured
	I0916 10:48:06.295030 1401996 command_runner.go:130] > storageclass.storage.k8s.io/standard unchanged
	I0916 10:48:06.295062 1401996 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (4.752042541s)
	I0916 10:48:06.295182 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses
	I0916 10:48:06.295188 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.295197 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.295201 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.295320 1401996 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.031500772s)
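The two ssh_runner "Completed" lines above show how these addons are installed: minikube runs the bundled kubectl inside the node against the node-local kubeconfig, once per manifest. A hedged sketch of the same command shape follows; it shells out locally rather than over SSH as minikube does, and the applyManifest helper is our name, not minikube's.

package main

import (
	"fmt"
	"os/exec"
)

// applyManifest mirrors the command recorded in the log:
//   sudo KUBECONFIG=/var/lib/minikube/kubeconfig \
//     /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f <manifest>
func applyManifest(manifest string) error {
	cmd := exec.Command("sudo", "KUBECONFIG=/var/lib/minikube/kubeconfig",
		"/var/lib/minikube/binaries/v1.31.1/kubectl", "apply", "--force", "-f", manifest)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("kubectl apply %s: %v: %s", manifest, err, out)
	}
	fmt.Print(string(out))
	return nil
}

func main() {
	// The two manifests whose apply output appears in the log above.
	for _, m := range []string{
		"/etc/kubernetes/addons/storage-provisioner.yaml",
		"/etc/kubernetes/addons/storageclass.yaml",
	} {
		if err := applyManifest(m); err != nil {
			panic(err)
		}
	}
}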
	I0916 10:48:06.302450 1401996 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:48:06.302477 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.302486 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.302490 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.302493 1401996 round_trippers.go:580]     Audit-Id: a46c5f67-ef7e-4d00-b4b6-bf1b00251284
	I0916 10:48:06.302497 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.302500 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.302503 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.303381 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:06.306118 1401996 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:48:06.306194 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.306216 1401996 round_trippers.go:580]     Audit-Id: efdb759c-528f-47e1-a33c-909f4e747a5b
	I0916 10:48:06.306233 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.306264 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.306285 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.306302 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.306320 1401996 round_trippers.go:580]     Content-Length: 1273
	I0916 10:48:06.306349 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.306635 1401996 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"472"},"items":[{"metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 10:48:06.307239 1401996 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:06.307335 1401996 round_trippers.go:463] PUT https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:48:06.307361 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.307396 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.307417 1401996 round_trippers.go:473]     Content-Type: application/json
	I0916 10:48:06.307434 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.330130 1401996 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:48:06.330203 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.330223 1401996 round_trippers.go:580]     Content-Length: 1220
	I0916 10:48:06.330244 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.330275 1401996 round_trippers.go:580]     Audit-Id: c3519c1d-0b15-44be-ba85-74c1a4cd1612
	I0916 10:48:06.330296 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.330309 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.330326 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.330360 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.330643 1401996 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
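The GET/PUT pair above is a read-modify-write on the "standard" StorageClass; the storageclass.kubernetes.io/is-default-class: "true" annotation in the request body is what marks it as the cluster's default class. A minimal client-go sketch of the same update, again assuming a default kubeconfig:

package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	ctx := context.TODO()
	// Fetch the current object (the GET above), set the default-class
	// annotation, then write it back (the PUT above).
	sc, err := cs.StorageV1().StorageClasses().Get(ctx, "standard", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	if sc.Annotations == nil {
		sc.Annotations = map[string]string{}
	}
	sc.Annotations["storageclass.kubernetes.io/is-default-class"] = "true"
	if _, err := cs.StorageV1().StorageClasses().Update(ctx, sc, metav1.UpdateOptions{}); err != nil {
		panic(err)
	}
}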
	I0916 10:48:06.335529 1401996 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:06.338267 1401996 addons.go:510] duration metric: took 6.350561855s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:06.655344 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:06.655367 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.655377 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.655383 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.658864 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:06.658890 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.658899 1401996 round_trippers.go:580]     Audit-Id: 27b8243b-941e-466b-899a-27f9168ec15a
	I0916 10:48:06.658903 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.658908 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.658911 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.658914 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.658918 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.659036 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:06.692658 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.692701 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.692712 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.692717 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.695074 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:06.695101 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.695110 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.695115 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.695118 1401996 round_trippers.go:580]     Audit-Id: de8f589d-df48-428d-a44c-3f5cf6ca5c27
	I0916 10:48:06.695151 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.695161 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.695165 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.695310 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.155348 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:07.155373 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.155382 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.155386 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.157729 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.157756 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.157764 1401996 round_trippers.go:580]     Audit-Id: e28e28ca-46f8-45a1-9102-49223c47f5ba
	I0916 10:48:07.157770 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.157773 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.157776 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.157782 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.157785 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.158536 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:07.159091 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:07.159146 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.159170 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.159190 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.161613 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.161634 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.161641 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.161645 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.161648 1401996 round_trippers.go:580]     Audit-Id: 918b0e6f-c4bd-4271-8e3c-5b4ce9d57e17
	I0916 10:48:07.161651 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.161654 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.161657 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.161769 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.655945 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:07.655970 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.655978 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.655983 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.658377 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.658407 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.658416 1401996 round_trippers.go:580]     Audit-Id: 76d8e195-1414-4d8c-ba2e-8fe0c471961a
	I0916 10:48:07.658430 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.658434 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.658437 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.658457 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.658467 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.658601 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:07.659104 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:07.659122 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.659130 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.659135 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.661280 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.661303 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.661313 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.661318 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.661323 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.661326 1401996 round_trippers.go:580]     Audit-Id: 041d95a8-0989-4252-8fc5-0e02d4e5b989
	I0916 10:48:07.661329 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.661332 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.661648 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.662055 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
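Here the scheduler pod is still reporting Ready=False, and the timestamps that follow show the check repeating on a roughly 500ms cadence until either the pod turns Ready or the 6m0s budget runs out. A sketch of that poll loop using apimachinery's wait helpers is below; the interval and timeout are read off the log, and the loop body is our illustration rather than minikube's code.

package main

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Poll every 500ms, as the timestamps above suggest, for up to the 6m0s
	// budget the log mentions, until the PodReady condition becomes True.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods("kube-system").Get(ctx, "kube-scheduler-functional-919910", metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
	if err != nil {
		panic(err)
	}
}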
	I0916 10:48:08.155546 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:08.155619 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.155643 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.155665 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.158082 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.158153 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.158175 1401996 round_trippers.go:580]     Audit-Id: 6edd8771-2339-4400-9108-b26a1ef4d0bc
	I0916 10:48:08.158193 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.158223 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.158245 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.158263 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.158280 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.158439 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:08.158935 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:08.158954 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.158963 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.158968 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.161016 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.161039 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.161048 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.161051 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.161054 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.161057 1401996 round_trippers.go:580]     Audit-Id: a126ddfb-3d41-4bb4-8107-d94f2aae2022
	I0916 10:48:08.161060 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.161063 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.161463 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:08.656176 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:08.656201 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.656217 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.656223 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.658586 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.658651 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.658684 1401996 round_trippers.go:580]     Audit-Id: e5465de8-0fe1-4264-aa6e-f64ee2497e82
	I0916 10:48:08.658703 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.658740 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.658751 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.658755 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.658759 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.658892 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:08.659399 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:08.659422 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.659431 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.659452 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.661658 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.661694 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.661702 1401996 round_trippers.go:580]     Audit-Id: e04c4294-98da-4d6d-9ca5-5da906e4a84a
	I0916 10:48:08.661706 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.661709 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.661712 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.661714 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.661717 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.661890 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.156075 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:09.156102 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.156112 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.156117 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.158504 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.158531 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.158539 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.158545 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.158548 1401996 round_trippers.go:580]     Audit-Id: c771052a-b618-42a7-bfe5-d3cd94727405
	I0916 10:48:09.158551 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.158561 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.158564 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.158817 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:09.159316 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:09.159333 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.159351 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.159360 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.161355 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:09.161378 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.161387 1401996 round_trippers.go:580]     Audit-Id: 15e3a6f9-f715-4d69-b1cf-94a6b5aeb0cd
	I0916 10:48:09.161392 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.161396 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.161399 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.161402 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.161405 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.161846 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.656058 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:09.656083 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.656093 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.656097 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.658445 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.658510 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.658533 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.658552 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.658583 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.658605 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.658623 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.658641 1401996 round_trippers.go:580]     Audit-Id: 0e8ff4b5-169e-445c-adca-58d4f6417f04
	I0916 10:48:09.658850 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:09.659349 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:09.659366 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.659375 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.659379 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.661463 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.661484 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.661491 1401996 round_trippers.go:580]     Audit-Id: 7aabfc6e-075e-41c4-9548-b7c332b6ce4b
	I0916 10:48:09.661495 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.661522 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.661541 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.661544 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.661547 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.662008 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.662441 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
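The pod_ready.go:103 line above closes one iteration of minikube's readiness poll: roughly every 500ms it GETs the kube-scheduler pod and its node, then re-checks the pod's Ready condition before sleeping again. A minimal client-go sketch of that polling pattern follows (assuming kubeconfig access to the same cluster; an illustration of the technique, not minikube's actual pod_ready.go):

    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Load ~/.kube/config and build a clientset for the target cluster.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }

        // Poll every 500ms until the pod is Ready or the timeout expires.
        // (The 4-minute timeout is an assumption; minikube's own deadline differs.)
        err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 4*time.Minute, true,
            func(ctx context.Context) (bool, error) {
                pod, err := client.CoreV1().Pods("kube-system").Get(ctx,
                    "kube-scheduler-functional-919910", metav1.GetOptions{})
                if err != nil {
                    return false, nil // treat transient API errors as "not ready yet"
                }
                for _, c := range pod.Status.Conditions {
                    if c.Type == corev1.PodReady {
                        return c.Status == corev1.ConditionTrue, nil
                    }
                }
                return false, nil
            })
        fmt.Println("ready:", err == nil)
    }

The `immediate` flag makes the first check run before the first sleep, which matches the back-to-back GETs seen at the top of each cycle in this log.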
	I0916 10:48:10.155850 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:10.155879 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.155889 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.155894 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.158351 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.158379 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.158388 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.158393 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.158397 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.158401 1401996 round_trippers.go:580]     Audit-Id: a2849d4a-9951-48c6-9e96-b95eda339f33
	I0916 10:48:10.158403 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.158446 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.158702 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:10.159227 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:10.159256 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.159266 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.159270 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.161598 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.161623 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.161631 1401996 round_trippers.go:580]     Audit-Id: e958b3e9-cc1b-4750-90b1-360b921760fc
	I0916 10:48:10.161636 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.161640 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.161644 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.161647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.161650 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.162063 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:10.656157 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:10.656181 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.656191 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.656196 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.658610 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.658635 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.658642 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.658647 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.658650 1401996 round_trippers.go:580]     Audit-Id: abc213c3-888d-4372-ab96-c2b656dbe199
	I0916 10:48:10.658653 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.658656 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.658664 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.658943 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:10.659442 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:10.659460 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.659469 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.659474 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.661539 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.661563 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.661572 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.661577 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.661581 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.661585 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.661589 1401996 round_trippers.go:580]     Audit-Id: d8aea646-e77c-4c0a-ae7d-567910c0c574
	I0916 10:48:10.661592 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.661726 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:11.155937 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:11.155963 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.155973 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.155976 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.158585 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.158614 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.158624 1401996 round_trippers.go:580]     Audit-Id: 4a531874-fb4e-42d1-90e9-20c9231f6650
	I0916 10:48:11.158628 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.158632 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.158647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.158659 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.158663 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.159187 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:11.159752 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:11.159763 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.159772 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.159776 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.162269 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.162298 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.162307 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.162314 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.162317 1401996 round_trippers.go:580]     Audit-Id: 78c813d3-a798-456c-b8e9-696c0b388b89
	I0916 10:48:11.162320 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.162323 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.162326 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.162769 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:11.655388 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:11.655416 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.655425 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.655430 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.657936 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.657962 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.657971 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.657975 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.657979 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.657982 1401996 round_trippers.go:580]     Audit-Id: d3c4da8c-ad9d-4d03-bd59-3bbb70c528ad
	I0916 10:48:11.657985 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.657987 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.658364 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:11.658832 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:11.658848 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.658857 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.658862 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.660899 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.660923 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.660931 1401996 round_trippers.go:580]     Audit-Id: bb877529-f789-4b09-871c-1f09b015efed
	I0916 10:48:11.660935 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.660939 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.660985 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.660994 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.660999 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.661117 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:12.156271 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:12.156297 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.156308 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.156313 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.158870 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.158894 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.158902 1401996 round_trippers.go:580]     Audit-Id: 0c6eb45b-962b-4873-8662-1f1f93414add
	I0916 10:48:12.158909 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.158912 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.158916 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.158920 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.158923 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.159044 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:12.159576 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:12.159586 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.159594 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.159599 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.161846 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.161880 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.161889 1401996 round_trippers.go:580]     Audit-Id: ccde3b03-0793-4985-83c0-12846440be26
	I0916 10:48:12.161894 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.161898 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.161901 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.161904 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.161907 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.162157 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:12.162593 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
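The round_trippers.go and request.go dumps throughout this section are client-go's built-in transport tracing, emitted only at high klog verbosity; the "[truncated 5421 chars]" markers are its standard body elision below the maximum verbosity level. A sketch of how a Go program surfaces the same trace (the exact verbosity threshold is an assumption based on standard client-go behavior):

    package main

    import (
        "flag"

        "k8s.io/klog/v2"
    )

    func main() {
        // Raise klog verbosity so client-go's debug round tripper logs request
        // headers, response headers, and (truncated) bodies, as in this report.
        klog.InitFlags(nil)
        _ = flag.Set("v", "8") // assumed level; higher levels print untruncated bodies
        flag.Parse()
        // ...construct a rest.Config / kubernetes.Clientset as usual; every API
        // round trip is now traced to stderr in the format seen above.
    }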
	I0916 10:48:12.655305 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:12.655331 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.655340 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.655347 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.657713 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.657740 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.657749 1401996 round_trippers.go:580]     Audit-Id: 07b9928b-c0c0-4875-a188-8b628ddd8e44
	I0916 10:48:12.657754 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.657758 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.657761 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.657764 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.657768 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.658062 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:12.658565 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:12.658583 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.658592 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.658597 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.660663 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.660701 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.660709 1401996 round_trippers.go:580]     Audit-Id: c3a14552-eda6-49a2-a911-7d73fe1c4010
	I0916 10:48:12.660713 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.660717 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.660721 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.660726 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.660733 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.661029 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:13.156227 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:13.156254 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.156264 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.156267 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.158747 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.158778 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.158788 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.158792 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.158794 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.158797 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.158801 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.158819 1401996 round_trippers.go:580]     Audit-Id: b4cab94a-002e-4281-b7ab-c555e3ca8d94
	I0916 10:48:13.159195 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:13.159676 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:13.159694 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.159704 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.159710 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.162019 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.162042 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.162050 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.162056 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.162059 1401996 round_trippers.go:580]     Audit-Id: f8176ddc-23f5-403b-8117-84fe9f4942e0
	I0916 10:48:13.162062 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.162066 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.162069 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.162500 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:13.656171 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:13.656199 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.656210 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.656214 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.658620 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.658646 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.658655 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.658660 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.658663 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.658667 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.658671 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.658674 1401996 round_trippers.go:580]     Audit-Id: de20cd4b-0c4a-4fa1-93e9-28c79590dfcd
	I0916 10:48:13.658944 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:13.659464 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:13.659482 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.659491 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.659496 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.661610 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.661629 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.661643 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.661647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.661650 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.661653 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.661656 1401996 round_trippers.go:580]     Audit-Id: 4d2de6de-a599-4279-845c-6b9d39f3a34c
	I0916 10:48:13.661658 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.661879 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.155396 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:14.155423 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.155433 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.155437 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.157856 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.157884 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.157893 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.157898 1401996 round_trippers.go:580]     Audit-Id: da2be167-9564-4b39-af14-1ec1a09a5827
	I0916 10:48:14.157902 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.157905 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.157907 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.157910 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.158046 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:14.158543 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:14.158559 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.158569 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.158573 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.160723 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.160748 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.160757 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.160760 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.160763 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.160767 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.160770 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.160773 1401996 round_trippers.go:580]     Audit-Id: 5b1bb651-cb08-4360-bfe0-6f608e2e2e8f
	I0916 10:48:14.160912 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.655393 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:14.655431 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.655442 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.655446 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.657773 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.657837 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.657853 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.657860 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.657865 1401996 round_trippers.go:580]     Audit-Id: c8928e9e-58df-4328-b135-75e55429ffa1
	I0916 10:48:14.657868 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.657871 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.657874 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.658279 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:14.658859 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:14.658876 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.658885 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.658904 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.660901 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:14.660921 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.660929 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.660933 1401996 round_trippers.go:580]     Audit-Id: a480490d-9daf-4e76-ad83-5c9c102dc605
	I0916 10:48:14.660938 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.660941 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.660944 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.660946 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.661467 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.661867 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
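Throughout this stretch the scheduler pod stays at resourceVersion 460 with Ready=False, so the poll keeps cycling. For reference, the same condition can be read directly with a kubectl one-liner against this cluster's context (illustrative, mirroring what pod_ready.go checks):

    kubectl --context functional-919910 -n kube-system get pod kube-scheduler-functional-919910 \
      -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}'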
	I0916 10:48:15.155432 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:15.155461 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.155472 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.155477 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.158073 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.158152 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.158177 1401996 round_trippers.go:580]     Audit-Id: ac8fbba3-be63-4e5c-9c54-7edad14a5b66
	I0916 10:48:15.158199 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.158233 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.158266 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.158286 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.158305 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.158544 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:15.159054 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:15.159078 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.159087 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.159092 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.162067 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.162097 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.162106 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.162111 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.162122 1401996 round_trippers.go:580]     Audit-Id: 87aa48de-fda7-4a83-941c-0da0439419b3
	I0916 10:48:15.162125 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.162129 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.162131 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.162359 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:15.656149 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:15.656184 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.656194 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.656198 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.658899 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.659009 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.659024 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.659029 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.659032 1401996 round_trippers.go:580]     Audit-Id: 5d70e69f-0e76-4702-89cd-db609f376df7
	I0916 10:48:15.659034 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.659048 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.659054 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.659599 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:15.660068 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:15.660086 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.660094 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.660099 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.662075 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:15.662099 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.662108 1401996 round_trippers.go:580]     Audit-Id: 3d0c68c3-4517-4abd-a8ae-cea0682c0839
	I0916 10:48:15.662112 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.662115 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.662117 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.662120 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.662123 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.662312 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.156023 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:16.156050 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.156060 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.156065 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.158425 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.158450 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.158459 1401996 round_trippers.go:580]     Audit-Id: 0f492724-674f-4757-bc75-460167467303
	I0916 10:48:16.158463 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.158466 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.158471 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.158474 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.158477 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.158714 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:16.159225 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:16.159244 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.159254 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.159265 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.161350 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.161368 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.161376 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.161380 1401996 round_trippers.go:580]     Audit-Id: 7ddf8664-34c6-4c2a-846d-5fb6a65279b5
	I0916 10:48:16.161384 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.161387 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.161390 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.161392 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.161537 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.656225 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:16.656252 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.656262 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.656267 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.658699 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.658728 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.658737 1401996 round_trippers.go:580]     Audit-Id: bdd5ae87-e869-44fa-aeeb-fcbe80b5fe07
	I0916 10:48:16.658742 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.658746 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.658749 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.658752 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.658756 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.659114 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:16.659649 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:16.659669 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.659678 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.659690 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.661913 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.661940 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.661946 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.661952 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.661965 1401996 round_trippers.go:580]     Audit-Id: ecce3ff5-6416-42a6-b0c9-ccd63ec632b6
	I0916 10:48:16.661968 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.661972 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.661975 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.662121 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.662529 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:17.155393 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:17.155416 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.155426 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.155433 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.157890 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.157958 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.157982 1401996 round_trippers.go:580]     Audit-Id: 8e390082-9fa9-482d-8b04-db1bb2e1c058
	I0916 10:48:17.158002 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.158034 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.158054 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.158077 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.158080 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.158236 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:17.158763 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:17.158782 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.158791 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.158796 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.161280 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.161308 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.161316 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.161322 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.161326 1401996 round_trippers.go:580]     Audit-Id: 1ad80642-0309-4899-a58d-f663ef29271b
	I0916 10:48:17.161343 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.161347 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.161351 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.161629 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:17.655739 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:17.655767 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.655776 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.655782 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.658224 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.658251 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.658258 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.658263 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.658267 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.658270 1401996 round_trippers.go:580]     Audit-Id: 1eccb65e-89ac-43ec-ae5b-3075b1605045
	I0916 10:48:17.658273 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.658275 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.658511 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:17.659021 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:17.659040 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.659049 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.659056 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.661272 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.661306 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.661315 1401996 round_trippers.go:580]     Audit-Id: 61cb62de-df73-4da2-aa15-e934886c030e
	I0916 10:48:17.661319 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.661322 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.661325 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.661328 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.661336 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.661515 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:18.156182 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:18.156208 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.156219 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.156223 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.158676 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.158708 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.158724 1401996 round_trippers.go:580]     Audit-Id: 378e2663-2367-4e97-9d34-9dec3d71cad6
	I0916 10:48:18.158730 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.158733 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.158736 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.158740 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.158743 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.158866 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:18.159344 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:18.159361 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.159369 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.159374 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.161347 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:18.161412 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.161436 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.161454 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.161488 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.161493 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.161496 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.161499 1401996 round_trippers.go:580]     Audit-Id: bb959ec1-7e1d-44b4-8591-f88b305c6e05
	I0916 10:48:18.161660 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:18.656149 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:18.656175 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.656185 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.656191 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.658651 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.658682 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.658696 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.658702 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.658706 1401996 round_trippers.go:580]     Audit-Id: 768eca83-3fed-4116-813e-15e61a113bab
	I0916 10:48:18.658710 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.658714 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.658718 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.658859 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:18.659347 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:18.659365 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.659374 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.659378 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.661522 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.661583 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.661606 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.661626 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.661659 1401996 round_trippers.go:580]     Audit-Id: d494015f-8294-4098-b4a2-575432838f60
	I0916 10:48:18.661679 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.661697 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.661705 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.661907 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:19.156120 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:19.156144 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.156154 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.156158 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.158419 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.158444 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.158452 1401996 round_trippers.go:580]     Audit-Id: 466a2bac-8b73-4458-a018-6e697edd8946
	I0916 10:48:19.158456 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.158459 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.158462 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.158465 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.158468 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.158908 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"508","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5177 chars]
	I0916 10:48:19.159391 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:19.159408 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.159416 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.159420 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.161426 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:19.161450 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.161459 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.161462 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.161466 1401996 round_trippers.go:580]     Audit-Id: 8ce2e9ae-ed0f-49a1-8a6d-8d61334108e3
	I0916 10:48:19.161474 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.161481 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.161485 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.161628 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:19.162028 1401996 pod_ready.go:93] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:19.162049 1401996 pod_ready.go:82] duration metric: took 13.506931464s for pod "kube-scheduler-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:19.162062 1401996 pod_ready.go:39] duration metric: took 13.87017796s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
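The block above is the tail of a readiness poll: minikube re-issues GET requests for the kube-scheduler pod (and its node) roughly every 500 ms until pod_ready sees the Ready condition flip to True at resourceVersion 508. A minimal sketch of that pattern with client-go, assuming a configured *kubernetes.Clientset; waitPodReady and the timings are illustrative, not minikube's actual helper.

package poll

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

// waitPodReady polls a pod every 500 ms until its Ready condition is True,
// mirroring the GET loop in the log above.
func waitPodReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
			if err != nil {
				return false, nil // treat transient API errors as "not ready yet"
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
}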
	I0916 10:48:19.162082 1401996 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:19.162155 1401996 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:19.171973 1401996 command_runner.go:130] > 2790
	I0916 10:48:19.173159 1401996 api_server.go:72] duration metric: took 19.18578314s to wait for apiserver process to appear ...
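With the pods Ready, minikube confirms the apiserver process itself: pgrep with -x (exact match), -n (newest), and -f (match the full command line) returns PID 2790, the same PID the CRI-O log below reports for the restarted kube-apiserver container. A local-illustration sketch of that probe (minikube actually runs it over SSH inside the node):

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// -x exact match, -n newest process, -f match the full command line.
	out, err := exec.Command("sudo", "pgrep", "-xnf", "kube-apiserver.*minikube.*").Output()
	if err != nil {
		fmt.Println("kube-apiserver process not found:", err)
		return
	}
	fmt.Println("kube-apiserver PID:", strings.TrimSpace(string(out))) // e.g. 2790
}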
	I0916 10:48:19.173209 1401996 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:19.173244 1401996 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:19.180753 1401996 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:19.180850 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/version
	I0916 10:48:19.180863 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.180872 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.180876 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.181761 1401996 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:48:19.181779 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.181799 1401996 round_trippers.go:580]     Content-Length: 263
	I0916 10:48:19.181814 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.181823 1401996 round_trippers.go:580]     Audit-Id: 5f8ceb4a-a192-4ab4-8819-0cc719a145a8
	I0916 10:48:19.181826 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.181829 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.181832 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.181835 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.181852 1401996 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 10:48:19.181976 1401996 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:19.181996 1401996 api_server.go:131] duration metric: took 8.767062ms to wait for apiserver health ...
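The health check is two requests: a raw GET /healthz that must return the literal body "ok", then GET /version, whose JSON decodes into the v1.31.1 build info shown above. A sketch of both using client-go's discovery client, again assuming `cs` is a configured *kubernetes.Clientset:

package probe

import (
	"context"
	"fmt"

	"k8s.io/client-go/kubernetes"
)

func checkAPIServer(ctx context.Context, cs *kubernetes.Clientset) error {
	// GET /healthz through the authenticated REST client; a healthy server
	// answers 200 with the body "ok", as in the log above.
	body, err := cs.Discovery().RESTClient().Get().AbsPath("/healthz").DoRaw(ctx)
	if err != nil {
		return fmt.Errorf("healthz: %w", err)
	}
	if string(body) != "ok" {
		return fmt.Errorf("healthz returned %q", body)
	}
	// GET /version, decoded into a version.Info struct.
	v, err := cs.Discovery().ServerVersion()
	if err != nil {
		return fmt.Errorf("version: %w", err)
	}
	fmt.Printf("control plane version: %s\n", v.GitVersion) // v1.31.1 here
	return nil
}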
	I0916 10:48:19.182008 1401996 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:19.182072 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:19.182083 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.182090 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.182095 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.184562 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.184585 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.184594 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.184599 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.184602 1401996 round_trippers.go:580]     Audit-Id: ba5161ea-bc87-4841-a787-f752334f2d4b
	I0916 10:48:19.184605 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.184608 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.184610 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.185526 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"499","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61328 chars]
	I0916 10:48:19.189350 1401996 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:19.189393 1401996 system_pods.go:61] "coredns-7c65d6cfc9-qzn8c" [ada36fb7-8486-4afc-9bef-04ab2e65fc7b] Running
	I0916 10:48:19.189404 1401996 system_pods.go:61] "etcd-functional-919910" [73472289-b523-4c96-8d5d-33ea5c657902] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:19.189409 1401996 system_pods.go:61] "kindnet-nb5xl" [1282e172-7d16-4f24-9f7d-33da705832a9] Running
	I0916 10:48:19.189425 1401996 system_pods.go:61] "kube-apiserver-functional-919910" [82da7bbe-1484-402c-b1a5-7165f1938703] Running
	I0916 10:48:19.189437 1401996 system_pods.go:61] "kube-controller-manager-functional-919910" [483b3e2c-288a-41e1-a29b-33a95b5b536a] Running
	I0916 10:48:19.189442 1401996 system_pods.go:61] "kube-proxy-nvpzv" [2e1bfc3e-dea3-4511-a154-e367e28b0898] Running
	I0916 10:48:19.189446 1401996 system_pods.go:61] "kube-scheduler-functional-919910" [80a1c6e8-dcc4-4602-a66a-658796f6ae58] Running
	I0916 10:48:19.189456 1401996 system_pods.go:61] "storage-provisioner" [2eb6523f-f61a-4c33-8e91-0bbbb874554b] Running
	I0916 10:48:19.189462 1401996 system_pods.go:74] duration metric: took 7.448933ms to wait for pod list to return data ...
	I0916 10:48:19.189477 1401996 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:19.189577 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:19.189586 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.189594 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.189600 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.193300 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:19.193324 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.193332 1401996 round_trippers.go:580]     Audit-Id: 9800f864-2310-45e2-b31f-6b3b6d7aa7c3
	I0916 10:48:19.193337 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.193341 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.193344 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.193347 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.193349 1401996 round_trippers.go:580]     Content-Length: 261
	I0916 10:48:19.193352 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.193371 1401996 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"36ad6c69-803a-49f9-b31a-556a6fc643b7","resourceVersion":"324","creationTimestamp":"2024-09-16T10:47:06Z"}}]}
	I0916 10:48:19.193544 1401996 default_sa.go:45] found service account: "default"
	I0916 10:48:19.193564 1401996 default_sa.go:55] duration metric: took 4.080003ms for default service account to be created ...
	I0916 10:48:19.193573 1401996 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:19.193634 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:19.193644 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.193651 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.193655 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.196161 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.196184 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.196191 1401996 round_trippers.go:580]     Audit-Id: 8e823e0b-ba03-4d4a-a6eb-7aa20a6ef471
	I0916 10:48:19.196196 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.196200 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.196203 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.196207 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.196210 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.196651 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"499","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61328 chars]
	I0916 10:48:19.199402 1401996 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:19.199435 1401996 system_pods.go:89] "coredns-7c65d6cfc9-qzn8c" [ada36fb7-8486-4afc-9bef-04ab2e65fc7b] Running
	I0916 10:48:19.199446 1401996 system_pods.go:89] "etcd-functional-919910" [73472289-b523-4c96-8d5d-33ea5c657902] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:19.199452 1401996 system_pods.go:89] "kindnet-nb5xl" [1282e172-7d16-4f24-9f7d-33da705832a9] Running
	I0916 10:48:19.199457 1401996 system_pods.go:89] "kube-apiserver-functional-919910" [82da7bbe-1484-402c-b1a5-7165f1938703] Running
	I0916 10:48:19.199462 1401996 system_pods.go:89] "kube-controller-manager-functional-919910" [483b3e2c-288a-41e1-a29b-33a95b5b536a] Running
	I0916 10:48:19.199501 1401996 system_pods.go:89] "kube-proxy-nvpzv" [2e1bfc3e-dea3-4511-a154-e367e28b0898] Running
	I0916 10:48:19.199512 1401996 system_pods.go:89] "kube-scheduler-functional-919910" [80a1c6e8-dcc4-4602-a66a-658796f6ae58] Running
	I0916 10:48:19.199517 1401996 system_pods.go:89] "storage-provisioner" [2eb6523f-f61a-4c33-8e91-0bbbb874554b] Running
	I0916 10:48:19.199525 1401996 system_pods.go:126] duration metric: took 5.941781ms to wait for k8s-apps to be running ...
	I0916 10:48:19.199536 1401996 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:19.199594 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:19.213096 1401996 system_svc.go:56] duration metric: took 13.539501ms WaitForService to wait for kubelet
	I0916 10:48:19.213126 1401996 kubeadm.go:582] duration metric: took 19.225752653s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:19.213144 1401996 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:19.213231 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:19.213249 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.213258 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.213262 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.216028 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.216055 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.216063 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.216069 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.216072 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.216078 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.216080 1401996 round_trippers.go:580]     Audit-Id: 61db72f3-2af5-41ed-9166-d37dd31f349b
	I0916 10:48:19.216083 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.216221 1401996 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFie
lds":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time [truncated 6086 chars]
	I0916 10:48:19.216742 1401996 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:19.216776 1401996 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:19.216789 1401996 node_conditions.go:105] duration metric: took 3.638676ms to run NodePressure ...
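The NodePressure step reads each node's capacity out of the NodeList above: 203034800Ki of ephemeral storage and 2 CPUs. A sketch of that read, with `cs` again an assumed configured clientset:

package probe

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

func printNodeCapacity(ctx context.Context, cs *kubernetes.Clientset) error {
	nodes, err := cs.CoreV1().Nodes().List(ctx, metav1.ListOptions{})
	if err != nil {
		return err
	}
	for _, n := range nodes.Items {
		// Capacity is a map of resource name to quantity on the node status.
		storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		fmt.Printf("%s: ephemeral-storage=%s cpu=%s\n", n.Name, storage.String(), cpu.String())
	}
	return nil
}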
	I0916 10:48:19.216804 1401996 start.go:241] waiting for startup goroutines ...
	I0916 10:48:19.216814 1401996 start.go:246] waiting for cluster config update ...
	I0916 10:48:19.216825 1401996 start.go:255] writing updated cluster config ...
	I0916 10:48:19.217169 1401996 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:19.225843 1401996 out.go:177] * Done! kubectl is now configured to use "functional-919910" cluster and "default" namespace by default
	E0916 10:48:19.228445 1401996 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
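That final error is the common thread in this report's kubectl-driven failures: "exec format error" from fork/exec means the kubectl binary at /usr/local/bin/kubectl was built for a different CPU architecture than the arm64 host (typically an amd64 binary on an arm64 runner). The cluster itself came up; only the local kubectl is unusable. A quick way to confirm is `file /usr/local/bin/kubectl`, or programmatically by reading the ELF header, as in this sketch:

package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

func main() {
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer f.Close()
	// An arm64 host needs EM_AARCH64; an EM_X86_64 binary at this path
	// reproduces the "exec format error" above.
	fmt.Printf("binary machine: %v, host GOARCH: %s\n", f.Machine, runtime.GOARCH)
	if (runtime.GOARCH == "arm64") != (f.Machine == elf.EM_AARCH64) {
		fmt.Println("architecture mismatch: this binary cannot exec on this host")
	}
}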
	
	
	==> CRI-O <==
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.965477243Z" level=info msg="Started container" PID=2806 containerID=8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715 description=kube-system/kube-controller-manager-functional-919910/kube-controller-manager id=cbbd8d70-0ef6-40ee-b91a-d54d8cb4b085 name=/runtime.v1.RuntimeService/StartContainer sandboxID=0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.967789570Z" level=info msg="Started container" PID=2790 containerID=84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28 description=kube-system/kube-apiserver-functional-919910/kube-apiserver id=9dee0322-7a01-42bd-a3f5-c48572b8a62c name=/runtime.v1.RuntimeService/StartContainer sandboxID=8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.967919766Z" level=info msg="Started container" PID=2872 containerID=e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18 description=kube-system/kindnet-nb5xl/kindnet-cni id=30da5d1a-8366-4392-8ba6-af4109e8c65e name=/runtime.v1.RuntimeService/StartContainer sandboxID=306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.969673714Z" level=info msg="Started container" PID=2845 containerID=2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7 description=kube-system/coredns-7c65d6cfc9-qzn8c/coredns id=7f4d1327-f970-4e92-bbc9-1bce58c06e61 name=/runtime.v1.RuntimeService/StartContainer sandboxID=4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.157628516Z" level=info msg="Created container 68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7: kube-system/kube-proxy-nvpzv/kube-proxy" id=134ba4ed-d336-400c-be83-73f40fc44f23 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.158332925Z" level=info msg="Starting container: 68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7" id=b2a96f8b-787c-4a6e-abb3-407e88bf0a34 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.176145203Z" level=info msg="Created container 67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be: kube-system/storage-provisioner/storage-provisioner" id=a8e0d59f-1513-42fa-b3e0-e75ed88b690c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.176991335Z" level=info msg="Starting container: 67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be" id=64aa8b98-953a-42de-a047-e95022a4fd7d name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.192325137Z" level=info msg="Started container" PID=2885 containerID=67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be description=kube-system/storage-provisioner/storage-provisioner id=64aa8b98-953a-42de-a047-e95022a4fd7d name=/runtime.v1.RuntimeService/StartContainer sandboxID=e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.197212586Z" level=info msg="Started container" PID=2841 containerID=68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7 description=kube-system/kube-proxy-nvpzv/kube-proxy id=b2a96f8b-787c-4a6e-abb3-407e88bf0a34 name=/runtime.v1.RuntimeService/StartContainer sandboxID=46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.487147142Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491166117Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491204975Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491221582Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494833329Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494874403Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494889976Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.497990869Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.498028357Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.498045604Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501096176Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501132680Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501148638Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.504048544Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.504087410Z" level=info msg="Updated default CNI network name to kindnet"
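The CREATE/WRITE/RENAME sequence above is CRI-O watching /etc/cni/net.d while kindnet atomically rewrites its conflist (write a .temp file, then rename it into place); each event re-resolves the default CNI network. A minimal sketch of the same watch pattern using github.com/fsnotify/fsnotify, chosen here for illustration; the event names in the log come from the same inotify primitives:

package main

import (
	"log"
	"strings"

	"github.com/fsnotify/fsnotify"
)

func main() {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	if err := w.Add("/etc/cni/net.d"); err != nil {
		log.Fatal(err)
	}
	for ev := range w.Events {
		// CREATE/WRITE/RENAME on a conflist (or its .temp) triggers a
		// reload, mirroring the "CNI monitoring event" lines above.
		if strings.Contains(ev.Name, ".conflist") &&
			ev.Op&(fsnotify.Create|fsnotify.Write|fsnotify.Rename) != 0 {
			log.Printf("reloading CNI config after %s on %s", ev.Op, ev.Name)
		}
	}
}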
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   19 seconds ago       Running             storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   19 seconds ago       Running             kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   19 seconds ago       Running             kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   19 seconds ago       Running             coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	84ca31fb2ed03       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   19 seconds ago       Running             kube-apiserver            1                   8fd62fbc34bf1       kube-apiserver-functional-919910
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   19 seconds ago       Running             kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   19 seconds ago       Running             etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   19 seconds ago       Running             kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	89084e33c979a       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   32 seconds ago       Exited              coredns                   0                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	584cffa44f327       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   32 seconds ago       Exited              storage-provisioner       0                   e27809ba10603       storage-provisioner
	9fdab793eb970       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   About a minute ago   Exited              kube-proxy                0                   46672cf6a1a3c       kube-proxy-nvpzv
	3e31d247381fd       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   About a minute ago   Exited              kindnet-cni               0                   306886331d6ee       kindnet-nb5xl
	6d211253a1170       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   About a minute ago   Exited              kube-scheduler            0                   00455a328acb5       kube-scheduler-functional-919910
	19cb8b26283b5       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   About a minute ago   Exited              kube-controller-manager   0                   0ffab32638624       kube-controller-manager-functional-919910
	b88a79882d73e       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   About a minute ago   Exited              etcd                      0                   46079181d2925       etcd-functional-919910
	790d8c6b7f5cf       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   About a minute ago   Exited              kube-apiserver            0                   8fd62fbc34bf1       kube-apiserver-functional-919910
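Every ATTEMPT-1 container above shares its POD ID with an Exited ATTEMPT-0 twin, i.e. the whole control plane was restarted in place inside the same pod sandboxes roughly 19 seconds before this snapshot. The table is the CRI runtime's view of the node; a sketch for reproducing it by hand:

    # list running and exited containers as CRI-O reports them
    minikube ssh -p functional-919910 -- sudo crictl ps -a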
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	
	
	==> coredns [89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:48552 - 42859 "HINFO IN 442387380457581256.8648752210731241523. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.057702013s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
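The two coredns sections cover both attempts of the same pod: 89084e33c979a (ATTEMPT 0) served queries until the SIGTERM/lameduck shutdown logged above, and 2089d6c47dd67 (ATTEMPT 1) retried 10.96.0.1:443 with "connection refused" until the restarted apiserver came back. A sketch for pulling either stream, assuming the kubectl context carries the profile name as minikube normally configures:

    # current attempt, then the previous one
    kubectl --context functional-919910 -n kube-system logs coredns-7c65d6cfc9-qzn8c
    kubectl --context functional-919910 -n kube-system logs coredns-7c65d6cfc9-qzn8c --previous
    # or directly by container ID through the CRI runtime
    minikube ssh -p functional-919910 -- sudo crictl logs 89084e33c979a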
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:48:15 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     74s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         80s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      74s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         79s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         79s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         74s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         79s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         73s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 72s   kube-proxy       
	  Normal   Starting                 14s   kube-proxy       
	  Normal   Starting                 79s   kubelet          Starting kubelet.
	  Warning  CgroupV1                 79s   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  79s   kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    79s   kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     79s   kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           75s   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                33s   kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           12s   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
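This section is plain node-describe output; the doubled Starting (kube-proxy) and RegisteredNode events, one pair per apiserver lifetime, are the visible trace of the in-place restart. To reproduce:

    kubectl --context functional-919910 describe node functional-919910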
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:01.207309Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:01.207390Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:01.207421Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:01.214478Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:01.216143Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:01.229380Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:01.229419Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:01.229922Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:01.229979Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:02.360729Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	
	
	==> etcd [b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093] <==
	{"level":"info","ts":"2024-09-16T10:46:55.514339Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:46:55.514365Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:46:55.520762Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.523524Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:46:55.528689Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:46:55.529056Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:46:55.529228Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:46:55.529273Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:46:55.529930Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:46:55.530896Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:46:55.531588Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:46:55.538109Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:46:55.537145Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.560810Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.561697Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:51.971408Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:47:51.971462Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:47:51.971540Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:51.971636Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:52.033719Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:52.033852Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:47:52.033933Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:47:52.036245Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:52.036424Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:52.036463Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:48:20 up 10:30,  0 users,  load average: 1.83, 1.33, 1.67
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
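The three kernel lines are likely assembled from uptime, uname and /etc/os-release; something like this sketch would reproduce them:

    minikube ssh -p functional-919910 -- 'uptime; uname -a; grep PRETTY_NAME /etc/os-release'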
	
	
	==> kindnet [3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9] <==
	W0916 10:47:37.742874       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.743031       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743158       1 trace.go:236] Trace[521782442]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30000ms):
	Trace[521782442]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.743)
	Trace[521782442]: [30.0005021s] [30.0005021s] END
	E0916 10:47:37.743220       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743054       1 trace.go:236] Trace[974010787]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.741) (total time: 30001ms):
	Trace[974010787]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:47:37.742)
	Trace[974010787]: [30.001379879s] [30.001379879s] END
	E0916 10:47:37.743260       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.742973       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743314       1 trace.go:236] Trace[1827800835]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30000ms):
	Trace[1827800835]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.742)
	Trace[1827800835]: [30.000860294s] [30.000860294s] END
	E0916 10:47:37.743332       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.742868       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743361       1 trace.go:236] Trace[215844066]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30001ms):
	Trace[215844066]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.742)
	Trace[215844066]: [30.001155081s] [30.001155081s] END
	E0916 10:47:37.743369       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:39.343940       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:39.343985       1 metrics.go:61] Registering metrics
	I0916 10:47:39.344036       1 controller.go:374] Syncing nftables rules
	I0916 10:47:47.742951       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:47.743092       1 main.go:299] handling current node
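Note the failure mode in this attempt-0 log: the reflector calls time out after exactly 30s rather than getting refused, which points at the 10.96.0.1 VIP not yet being programmed (kube-proxy's caches only synced at 10:47:07.9, just after kindnet's first List went out) rather than at a down apiserver; the retry succeeds and caches sync at 10:47:39. A sketch to see what backs that VIP:

    # the default kubernetes Service forwards to the apiserver's real endpoint
    kubectl --context functional-919910 get endpoints kubernetes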
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	
	
	==> kube-apiserver [790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75] <==
	W0916 10:47:52.019895       1 logging.go:55] [core] [Channel #94 SubChannel #95]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.019932       1 logging.go:55] [core] [Channel #25 SubChannel #26]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.019999       1 logging.go:55] [core] [Channel #142 SubChannel #143]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020055       1 logging.go:55] [core] [Channel #184 SubChannel #185]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020090       1 logging.go:55] [core] [Channel #163 SubChannel #164]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020151       1 logging.go:55] [core] [Channel #178 SubChannel #179]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020210       1 logging.go:55] [core] [Channel #40 SubChannel #41]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020272       1 logging.go:55] [core] [Channel #88 SubChannel #89]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020333       1 logging.go:55] [core] [Channel #100 SubChannel #101]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020367       1 logging.go:55] [core] [Channel #130 SubChannel #131]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020210       1 logging.go:55] [core] [Channel #52 SubChannel #53]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020275       1 logging.go:55] [core] [Channel #139 SubChannel #140]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020486       1 logging.go:55] [core] [Channel #181 SubChannel #182]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020542       1 logging.go:55] [core] [Channel #64 SubChannel #65]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020596       1 logging.go:55] [core] [Channel #91 SubChannel #92]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020631       1 logging.go:55] [core] [Channel #5 SubChannel #6]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020658       1 logging.go:55] [core] [Channel #46 SubChannel #47]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020892       1 logging.go:55] [core] [Channel #169 SubChannel #170]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020542       1 logging.go:55] [core] [Channel #115 SubChannel #116]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020971       1 logging.go:55] [core] [Channel #112 SubChannel #113]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021029       1 logging.go:55] [core] [Channel #106 SubChannel #107]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021078       1 logging.go:55] [core] [Channel #118 SubChannel #119]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021259       1 logging.go:55] [core] [Channel #49 SubChannel #50]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021317       1 logging.go:55] [core] [Channel #37 SubChannel #38]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020062       1 logging.go:55] [core] [Channel #121 SubChannel #122]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	
	
	==> kube-apiserver [84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28] <==
	I0916 10:48:04.920963       1 establishing_controller.go:81] Starting EstablishingController
	I0916 10:48:04.920985       1 nonstructuralschema_controller.go:195] Starting NonStructuralSchemaConditionController
	I0916 10:48:04.921001       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 10:48:04.921012       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 10:48:04.921551       1 dynamic_cafile_content.go:160] "Starting controller" name="client-ca-bundle::/var/lib/minikube/certs/ca.crt"
	I0916 10:48:04.921665       1 dynamic_cafile_content.go:160] "Starting controller" name="request-header::/var/lib/minikube/certs/front-proxy-ca.crt"
	I0916 10:48:05.328418       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:05.333994       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:05.334017       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:05.334025       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:05.402697       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:05.402737       1 policy_source.go:224] refreshing policies
	I0916 10:48:05.412636       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:05.413378       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:05.413700       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:48:05.413719       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:05.413984       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:05.414029       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:05.420744       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:05.434267       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:05.434551       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:05.442758       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:05.448233       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 10:48:05.573273       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:05.963340       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
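The single E-level line here ("no API server IP addresses were listed in storage") is commonly seen right after an in-place restart, while the endpoint reconciler's storage is still empty, and is benign on its own. A sketch for checking the restarted apiserver's health checks directly:

    # aggregated readiness with per-check detail
    kubectl --context functional-919910 get --raw '/readyz?verbose'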
	
	
	==> kube-controller-manager [19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced] <==
	I0916 10:47:05.622532       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-kubelet-client
	I0916 10:47:05.622583       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-kube-apiserver-client
	I0916 10:47:05.622580       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-legacy-unknown
	I0916 10:47:05.625844       1 shared_informer.go:320] Caches are synced for certificate-csrapproving
	I0916 10:47:05.681754       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:05.685862       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:06.120562       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:06.128166       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:06.128203       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:06.298714       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:06.412493       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="928.095132ms"
	I0916 10:47:06.436112       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="23.562472ms"
	I0916 10:47:06.436311       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="76.216µs"
	I0916 10:47:06.436431       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="29.243µs"
	I0916 10:47:07.569236       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.056158ms"
	I0916 10:47:07.583187       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="13.90119ms"
	I0916 10:47:07.583320       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.289µs"
	I0916 10:47:47.904049       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:47.921959       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:47.932048       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="55.187µs"
	I0916 10:47:47.946797       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="60.782µs"
	I0916 10:47:48.553366       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="11.363114ms"
	I0916 10:47:48.553489       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="53.668µs"
	I0916 10:47:50.460452       1 node_lifecycle_controller.go:1055] "Controller detected that some Nodes are Ready. Exiting master disruption mode" logger="node-lifecycle-controller"
	I0916 10:47:50.460790       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.527001       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"functional-919910\" does not exist"
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5] <==
	I0916 10:47:07.571834       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:07.773307       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:07.773379       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:07.796962       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:07.797030       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:07.798928       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:07.799225       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:07.799259       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:07.800857       1 config.go:199] "Starting service config controller"
	I0916 10:47:07.800897       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:07.800943       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:07.800953       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:07.801557       1 config.go:328] "Starting node config controller"
	I0916 10:47:07.801619       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:07.901018       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:47:07.901079       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:07.903369       1 shared_informer.go:320] Caches are synced for node config
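Both kube-proxy attempts log the same "nodePortAddresses is unset" warning; it is advisory (NodePorts will bind on every local IP) and traces back to one field in the kubeadm-generated kube-proxy ConfigMap. A sketch for locating it, assuming the standard config.conf key:

    kubectl --context functional-919910 -n kube-system get configmap kube-proxy \
      -o jsonpath='{.data.config\.conf}' | grep nodePortAddresses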
	
	
	==> kube-scheduler [6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49] <==
	E0916 10:46:58.939724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.939898       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.939956       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940085       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:46:58.940140       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940253       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:46:58.940307       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940426       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.940479       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940577       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.940630       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940734       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:46:58.940791       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940756       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:46:58.940890       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.748051       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:59.748098       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.797427       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:59.797568       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.814459       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:46:59.814584       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:00.394732       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:00.394804       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0916 10:47:02.116962       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:47:51.976491       1 run.go:72] "command failed" err="finished without leader elect"
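The "forbidden" storm between 10:46:58 and 10:47:00 is the scheduler starting before kubeadm has finished bootstrapping RBAC; it stops once the client-ca informer syncs at 10:47:02. The closing "finished without leader elect" at 10:47:51 coincides with the control-plane shutdown above and is typical of a process stopped mid-lease rather than a scheduling failure. A sketch for confirming the RBAC these errors were waiting on:

    # should print "yes" once bootstrap RBAC is in place
    kubectl --context functional-919910 auth can-i list storageclasses.storage.k8s.io \
      --as system:kube-scheduler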
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.559801    1525 status_manager.go:851] "Failed to get status for pod" podUID="3e910b182a705a484fdc6733177892d1" pod="kube-system/etcd-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.560331    1525 status_manager.go:851] "Failed to get status for pod" podUID="3d8a6ba31c18f33c5660170029e5cde1" pod="kube-system/kube-apiserver-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.563349    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.580042    1525 scope.go:117] "RemoveContainer" containerID="9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.581495    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.584123    1525 scope.go:117] "RemoveContainer" containerID="3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.588218    1525 scope.go:117] "RemoveContainer" containerID="584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.589441    1525 scope.go:117] "RemoveContainer" containerID="89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.592539    1525 status_manager.go:851] "Failed to get status for pod" podUID="60f2072c6865fb71ef7928175ceb3dad" pod="kube-system/kube-scheduler-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.593132    1525 status_manager.go:851] "Failed to get status for pod" podUID="3e910b182a705a484fdc6733177892d1" pod="kube-system/etcd-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.599626    1525 status_manager.go:851] "Failed to get status for pod" podUID="3d8a6ba31c18f33c5660170029e5cde1" pod="kube-system/kube-apiserver-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.603888    1525 status_manager.go:851] "Failed to get status for pod" podUID="2e1bfc3e-dea3-4511-a154-e367e28b0898" pod="kube-system/kube-proxy-nvpzv" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.604793    1525 status_manager.go:851] "Failed to get status for pod" podUID="ada36fb7-8486-4afc-9bef-04ab2e65fc7b" pod="kube-system/coredns-7c65d6cfc9-qzn8c" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-qzn8c\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.618533    1525 status_manager.go:851] "Failed to get status for pod" podUID="2eb6523f-f61a-4c33-8e91-0bbbb874554b" pod="kube-system/storage-provisioner" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/storage-provisioner\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.618991    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619364    1525 status_manager.go:851] "Failed to get status for pod" podUID="60f2072c6865fb71ef7928175ceb3dad" pod="kube-system/kube-scheduler-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619674    1525 status_manager.go:851] "Failed to get status for pod" podUID="3e910b182a705a484fdc6733177892d1" pod="kube-system/etcd-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619915    1525 status_manager.go:851] "Failed to get status for pod" podUID="3d8a6ba31c18f33c5660170029e5cde1" pod="kube-system/kube-apiserver-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.620147    1525 status_manager.go:851] "Failed to get status for pod" podUID="2e1bfc3e-dea3-4511-a154-e367e28b0898" pod="kube-system/kube-proxy-nvpzv" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.620368    1525 status_manager.go:851] "Failed to get status for pod" podUID="1282e172-7d16-4f24-9f7d-33da705832a9" pod="kube-system/kindnet-nb5xl" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kindnet-nb5xl\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:01 functional-919910 kubelet[1525]: E0916 10:48:01.441856    1525 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483681441651840,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:01 functional-919910 kubelet[1525]: E0916 10:48:01.441896    1525 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483681441651840,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:05 functional-919910 kubelet[1525]: E0916 10:48:05.003719    1525 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError"
	Sep 16 10:48:11 functional-919910 kubelet[1525]: E0916 10:48:11.442879    1525 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483691442663916,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:11 functional-919910 kubelet[1525]: E0916 10:48:11.442932    1525 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483691442663916,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf] <==
	I0916 10:47:48.421537       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:47:48.454695       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:47:48.454750       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:47:48.463044       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:47:48.463305       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3!
	I0916 10:47:48.464240       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"414", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3 became leader
	I0916 10:47:48.563893       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	

                                                
                                                
-- /stdout --
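Two patterns in the log dump above are noisy but separable from the actual failure. The kube-scheduler's "cannot list resource ... at the cluster scope" warnings are transient RBAC errors emitted while the kube-apiserver restarts; they stop once the informer caches sync in the second scheduler instance. The kubelet's repeating "missing image stats" eviction-manager errors mean it treats CRI-O's ImageFsInfo response as incomplete. A diagnostic sketch (not part of the test run) to see what the runtime reports, assuming crictl is installed on the node:

	sudo crictl imagefsinfo

Neither pattern explains the "exec format error" that actually fails this test; that points at the kubectl binary itself, as diagnosed under the next test below.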
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (674.411µs)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/KubeContext (2.89s)

                                                
                                    
x
+
TestFunctional/serial/KubectlGetPods (2.86s)

                                                
                                                
=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:696: (dbg) Run:  kubectl --context functional-919910 get po -A
functional_test.go:696: (dbg) Non-zero exit: kubectl --context functional-919910 get po -A: fork/exec /usr/local/bin/kubectl: exec format error (604.777µs)
functional_test.go:698: failed to get kubectl pods: args "kubectl --context functional-919910 get po -A" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:705: expected stdout to include *kube-system* but got *""*. args: "kubectl --context functional-919910 get po -A"
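Every kubectl invocation in this run dies before kubectl even executes: "fork/exec /usr/local/bin/kubectl: exec format error" means the kernel refused to run the binary, and on an arm64 host that almost always indicates a kubectl built for another architecture (typically amd64) was installed. A minimal check and fix, assuming shell access to the CI host (the version shown mirrors the cluster's v1.31.1 and is illustrative):

	# Should report an ARM aarch64 executable here; an x86-64 result confirms the mismatch.
	file /usr/local/bin/kubectl
	uname -m

	# Reinstall the matching build (standard upstream download layout).
	curl -LO "https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl"
	sudo install -m 0755 kubectl /usr/local/bin/kubectl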
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/KubectlGetPods]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
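When only a few fields of the inspect dump matter, a Go-template format string narrows the output; the harness itself uses this pattern (see the docker container inspect -f calls in the start log below). A sketch against the same container:

	docker inspect -f 'status={{.State.Status}} pid={{.State.Pid}} ip={{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' functional-919910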
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/serial/KubectlGetPods FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/KubectlGetPods]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (1.858135172s)
helpers_test.go:252: TestFunctional/serial/KubectlGetPods logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| addons  | disable nvidia-device-plugin   | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | -p addons-936355               |                   |         |         |                     |                     |
	| addons  | enable headlamp                | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | -p addons-936355               |                   |         |         |                     |                     |
	|         | --alsologtostderr -v=1         |                   |         |         |                     |                     |
	| addons  | disable cloud-spanner -p       | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | addons-936355 addons disable   | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | headlamp --alsologtostderr     |                   |         |         |                     |                     |
	|         | -v=1                           |                   |         |         |                     |                     |
	| stop    | -p addons-936355               | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| addons  | enable dashboard -p            | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | disable dashboard -p           | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| addons  | disable gvisor -p              | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | addons-936355                  |                   |         |         |                     |                     |
	| delete  | -p addons-936355               | addons-936355     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| start   | -p nospam-329014 -n=1          | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:46 UTC |
	|         | --memory=2250 --wait=false     |                   |         |         |                     |                     |
	|         | --log_dir=/tmp/nospam-329014   |                   |         |         |                     |                     |
	|         | --driver=docker                |                   |         |         |                     |                     |
	|         | --container-runtime=crio       |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| start   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|         | /tmp/nospam-329014 start       |                   |         |         |                     |                     |
	|         | --dry-run                      |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| pause   | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 pause       |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause     |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir        | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop        |                   |         |         |                     |                     |
	| delete  | -p nospam-329014               | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	| start   | -p functional-919910           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:47 UTC |
	|         | --memory=4000                  |                   |         |         |                     |                     |
	|         | --apiserver-port=8441          |                   |         |         |                     |                     |
	|         | --wait=all --driver=docker     |                   |         |         |                     |                     |
	|         | --container-runtime=crio       |                   |         |         |                     |                     |
	| start   | -p functional-919910           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|         | --alsologtostderr -v=8         |                   |         |         |                     |                     |
	|---------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:47:50
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:47:50.205624 1401996 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:47:50.205806 1401996 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:50.205843 1401996 out.go:358] Setting ErrFile to fd 2...
	I0916 10:47:50.205856 1401996 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:50.206158 1401996 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:47:50.206622 1401996 out.go:352] Setting JSON to false
	I0916 10:47:50.207693 1401996 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37816,"bootTime":1726445855,"procs":176,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:47:50.207772 1401996 start.go:139] virtualization:  
	I0916 10:47:50.211311 1401996 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:47:50.214854 1401996 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:47:50.214961 1401996 notify.go:220] Checking for updates...
	I0916 10:47:50.221512 1401996 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:47:50.225211 1401996 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:50.228542 1401996 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:47:50.231201 1401996 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:47:50.233889 1401996 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:47:50.237099 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:50.237205 1401996 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:47:50.273934 1401996 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:47:50.274107 1401996 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:50.332273 1401996 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:47:50.322237464 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:50.332388 1401996 docker.go:318] overlay module found
	I0916 10:47:50.335094 1401996 out.go:177] * Using the docker driver based on existing profile
	I0916 10:47:50.337742 1401996 start.go:297] selected driver: docker
	I0916 10:47:50.337768 1401996 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:50.337887 1401996 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:47:50.338002 1401996 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:50.398334 1401996 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:47:50.388377677 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:50.398780 1401996 cni.go:84] Creating CNI manager for ""
	I0916 10:47:50.398847 1401996 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:47:50.398900 1401996 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:50.401766 1401996 out.go:177] * Starting "functional-919910" primary control-plane node in "functional-919910" cluster
	I0916 10:47:50.404667 1401996 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:47:50.407246 1401996 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:47:50.409875 1401996 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:47:50.409936 1401996 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:47:50.409948 1401996 cache.go:56] Caching tarball of preloaded images
	I0916 10:47:50.409958 1401996 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:47:50.410031 1401996 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:47:50.410041 1401996 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:47:50.410157 1401996 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/config.json ...
	W0916 10:47:50.438976 1401996 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:47:50.438997 1401996 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:47:50.439079 1401996 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:47:50.439104 1401996 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:47:50.439111 1401996 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:47:50.439119 1401996 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:47:50.439137 1401996 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:47:50.440551 1401996 image.go:273] response: 
	I0916 10:47:50.570726 1401996 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:47:50.570776 1401996 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:47:50.570806 1401996 start.go:360] acquireMachinesLock for functional-919910: {Name:mkddf275897a7528274aa0390d95d40845ffb1ab Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:47:50.570911 1401996 start.go:364] duration metric: took 57.352µs to acquireMachinesLock for "functional-919910"
	I0916 10:47:50.570939 1401996 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:47:50.570948 1401996 fix.go:54] fixHost starting: 
	I0916 10:47:50.571270 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:47:50.588516 1401996 fix.go:112] recreateIfNeeded on functional-919910: state=Running err=<nil>
	W0916 10:47:50.588549 1401996 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:47:50.591367 1401996 out.go:177] * Updating the running docker "functional-919910" container ...
	I0916 10:47:50.593873 1401996 machine.go:93] provisionDockerMachine start ...
	I0916 10:47:50.594037 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.612718 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.613019 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.613034 1401996 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:47:50.756346 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:47:50.756391 1401996 ubuntu.go:169] provisioning hostname "functional-919910"
	I0916 10:47:50.756460 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.775145 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.775403 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.775430 1401996 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-919910 && echo "functional-919910" | sudo tee /etc/hostname
	I0916 10:47:50.926354 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:47:50.926444 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:50.945122 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:50.945378 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:50.945401 1401996 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-919910' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-919910/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-919910' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:47:51.093446 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:47:51.093489 1401996 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:47:51.093510 1401996 ubuntu.go:177] setting up certificates
	I0916 10:47:51.093521 1401996 provision.go:84] configureAuth start
	I0916 10:47:51.093593 1401996 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:47:51.112587 1401996 provision.go:143] copyHostCerts
	I0916 10:47:51.112638 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:47:51.112698 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:47:51.112711 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:47:51.112791 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:47:51.112900 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:47:51.112924 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:47:51.112931 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:47:51.112961 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:47:51.113021 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:47:51.113043 1401996 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:47:51.113051 1401996 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:47:51.113092 1401996 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:47:51.113161 1401996 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.functional-919910 san=[127.0.0.1 192.168.49.2 functional-919910 localhost minikube]
	I0916 10:47:51.593684 1401996 provision.go:177] copyRemoteCerts
	I0916 10:47:51.593768 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:47:51.593810 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:51.612407 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:51.710588 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:47:51.710650 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:47:51.738523 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:47:51.738608 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:47:51.763604 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:47:51.763668 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:47:51.791416 1401996 provision.go:87] duration metric: took 697.879051ms to configureAuth
	I0916 10:47:51.791445 1401996 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:47:51.791646 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:51.791766 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:51.809379 1401996 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:51.809661 1401996 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:47:51.809685 1401996 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:47:57.199046 1401996 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:47:57.199069 1401996 machine.go:96] duration metric: took 6.605174237s to provisionDockerMachine
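Provisioning finishes by dropping an environment file for CRI-O and restarting the service. Assuming the kicbase image's crio unit sources /etc/sysconfig/crio.minikube (the unit itself is not shown in this log), the result can be inspected on the node with:

	sudo cat /etc/sysconfig/crio.minikube   # CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	sudo systemctl cat crio                 # full unit plus any drop-ins that reference the file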
	I0916 10:47:57.199080 1401996 start.go:293] postStartSetup for "functional-919910" (driver="docker")
	I0916 10:47:57.199092 1401996 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:47:57.199163 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:47:57.199205 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.216257 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.317946 1401996 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:47:57.321343 1401996 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 10:47:57.321365 1401996 command_runner.go:130] > NAME="Ubuntu"
	I0916 10:47:57.321371 1401996 command_runner.go:130] > VERSION_ID="22.04"
	I0916 10:47:57.321377 1401996 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 10:47:57.321382 1401996 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 10:47:57.321386 1401996 command_runner.go:130] > ID=ubuntu
	I0916 10:47:57.321390 1401996 command_runner.go:130] > ID_LIKE=debian
	I0916 10:47:57.321394 1401996 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 10:47:57.321400 1401996 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 10:47:57.321406 1401996 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 10:47:57.321413 1401996 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 10:47:57.321417 1401996 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 10:47:57.321481 1401996 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:47:57.321509 1401996 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:47:57.321522 1401996 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:47:57.321532 1401996 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:47:57.321543 1401996 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:47:57.321605 1401996 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:47:57.321689 1401996 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:47:57.321700 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:47:57.321774 1401996 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> hosts in /etc/test/nested/copy/1383833
	I0916 10:47:57.321782 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> /etc/test/nested/copy/1383833/hosts
	I0916 10:47:57.321836 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/1383833
	I0916 10:47:57.330778 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:47:57.356667 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts --> /etc/test/nested/copy/1383833/hosts (40 bytes)
	I0916 10:47:57.381584 1401996 start.go:296] duration metric: took 182.487479ms for postStartSetup
	I0916 10:47:57.381669 1401996 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:47:57.381735 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.399941 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.493892 1401996 command_runner.go:130] > 12%
	I0916 10:47:57.493969 1401996 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:47:57.498209 1401996 command_runner.go:130] > 172G
	I0916 10:47:57.498639 1401996 fix.go:56] duration metric: took 6.927687118s for fixHost
	I0916 10:47:57.498657 1401996 start.go:83] releasing machines lock for "functional-919910", held for 6.927732663s
	I0916 10:47:57.498733 1401996 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:47:57.515304 1401996 ssh_runner.go:195] Run: cat /version.json
	I0916 10:47:57.515346 1401996 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:47:57.515393 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.515401 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:47:57.534618 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.535082 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:47:57.628245 1401996 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 10:47:57.628425 1401996 ssh_runner.go:195] Run: systemctl --version
	I0916 10:47:57.749769 1401996 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 10:47:57.752956 1401996 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 10:47:57.752994 1401996 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 10:47:57.753060 1401996 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:47:57.895632 1401996 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:47:57.899891 1401996 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 10:47:57.899921 1401996 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 10:47:57.899928 1401996 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 10:47:57.899936 1401996 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:57.899942 1401996 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 10:47:57.899947 1401996 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 10:47:57.899952 1401996 command_runner.go:130] > Change: 2024-09-16 10:46:42.438096271 +0000
	I0916 10:47:57.899957 1401996 command_runner.go:130] >  Birth: 2024-09-16 10:46:42.434096374 +0000
	I0916 10:47:57.900143 1401996 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:47:57.909149 1401996 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:47:57.909234 1401996 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:47:57.918432 1401996 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
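The two find runs above neutralize any default loopback/bridge CNI configs by renaming them in place rather than deleting them, so the CNI minikube deploys can own /etc/cni/net.d. A quick check on the node:

	ls /etc/cni/net.d/    # disabled files carry a .mk_disabled suffix, e.g. 200-loopback.conf.mk_disabled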
	I0916 10:47:57.918474 1401996 start.go:495] detecting cgroup driver to use...
	I0916 10:47:57.918507 1401996 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:47:57.918558 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:47:57.932361 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:47:57.944964 1401996 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:47:57.945069 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:47:57.959974 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:47:57.972585 1401996 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:47:58.098313 1401996 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:47:58.224367 1401996 docker.go:233] disabling docker service ...
	I0916 10:47:58.224443 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:47:58.238821 1401996 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:47:58.251747 1401996 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:47:58.377715 1401996 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:47:58.505320 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:47:58.516841 1401996 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:47:58.533953 1401996 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
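With /etc/crictl.yaml in place, crictl defaults to the CRI-O socket and no longer needs --runtime-endpoint on every invocation:

	sudo cat /etc/crictl.yaml   # runtime-endpoint: unix:///var/run/crio/crio.sock
	sudo crictl info            # should report the cri-o runtime condition as ready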
	I0916 10:47:58.535668 1401996 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:47:58.535761 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.545995 1401996 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:47:58.546067 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.556144 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.567289 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.578091 1401996 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:47:58.587840 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.597939 1401996 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:47:58.607513 1401996 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
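The sed edits above rewrite /etc/crio/crio.conf.d/02-crio.conf in place: pause image, cgroup manager, conmon cgroup, and an unprivileged-port sysctl. One way to confirm the drop-in ended up as intended (expected values taken from the commands above):

	sudo grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' \
	  /etc/crio/crio.conf.d/02-crio.conf
	# pause_image = "registry.k8s.io/pause:3.10"
	# cgroup_manager = "cgroupfs"
	# conmon_cgroup = "pod"
	#   "net.ipv4.ip_unprivileged_port_start=0",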
	I0916 10:47:58.617313 1401996 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:47:58.625015 1401996 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 10:47:58.626470 1401996 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:47:58.635977 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:47:58.755097 1401996 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:47:58.923185 1401996 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:47:58.923281 1401996 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:47:58.927043 1401996 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 10:47:58.927069 1401996 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 10:47:58.927076 1401996 command_runner.go:130] > Device: 43h/67d	Inode: 572         Links: 1
	I0916 10:47:58.927084 1401996 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:58.927089 1401996 command_runner.go:130] > Access: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927114 1401996 command_runner.go:130] > Modify: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927125 1401996 command_runner.go:130] > Change: 2024-09-16 10:47:58.872135911 +0000
	I0916 10:47:58.927156 1401996 command_runner.go:130] >  Birth: -
	I0916 10:47:58.927204 1401996 start.go:563] Will wait 60s for crictl version
	I0916 10:47:58.927275 1401996 ssh_runner.go:195] Run: which crictl
	I0916 10:47:58.930492 1401996 command_runner.go:130] > /usr/bin/crictl
	I0916 10:47:58.930791 1401996 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:47:58.968537 1401996 command_runner.go:130] > Version:  0.1.0
	I0916 10:47:58.968561 1401996 command_runner.go:130] > RuntimeName:  cri-o
	I0916 10:47:58.968567 1401996 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 10:47:58.968573 1401996 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 10:47:58.971326 1401996 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:47:58.971438 1401996 ssh_runner.go:195] Run: crio --version
	I0916 10:47:59.011967 1401996 command_runner.go:130] > crio version 1.24.6
	I0916 10:47:59.012041 1401996 command_runner.go:130] > Version:          1.24.6
	I0916 10:47:59.012074 1401996 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 10:47:59.012093 1401996 command_runner.go:130] > GitTreeState:     clean
	I0916 10:47:59.012114 1401996 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 10:47:59.012149 1401996 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 10:47:59.012167 1401996 command_runner.go:130] > Compiler:         gc
	I0916 10:47:59.012185 1401996 command_runner.go:130] > Platform:         linux/arm64
	I0916 10:47:59.012207 1401996 command_runner.go:130] > Linkmode:         dynamic
	I0916 10:47:59.012241 1401996 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 10:47:59.012260 1401996 command_runner.go:130] > SeccompEnabled:   true
	I0916 10:47:59.012280 1401996 command_runner.go:130] > AppArmorEnabled:  false
	I0916 10:47:59.013900 1401996 ssh_runner.go:195] Run: crio --version
	I0916 10:47:59.050810 1401996 command_runner.go:130] > crio version 1.24.6
	I0916 10:47:59.050856 1401996 command_runner.go:130] > Version:          1.24.6
	I0916 10:47:59.050865 1401996 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 10:47:59.050870 1401996 command_runner.go:130] > GitTreeState:     clean
	I0916 10:47:59.050878 1401996 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 10:47:59.050882 1401996 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 10:47:59.050886 1401996 command_runner.go:130] > Compiler:         gc
	I0916 10:47:59.050890 1401996 command_runner.go:130] > Platform:         linux/arm64
	I0916 10:47:59.050908 1401996 command_runner.go:130] > Linkmode:         dynamic
	I0916 10:47:59.050919 1401996 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 10:47:59.050926 1401996 command_runner.go:130] > SeccompEnabled:   true
	I0916 10:47:59.050930 1401996 command_runner.go:130] > AppArmorEnabled:  false
	I0916 10:47:59.058518 1401996 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:47:59.061208 1401996 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:47:59.076753 1401996 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:47:59.080562 1401996 command_runner.go:130] > 192.168.49.1	host.minikube.internal
	I0916 10:47:59.080708 1401996 kubeadm.go:883] updating cluster {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:47:59.080823 1401996 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:47:59.080882 1401996 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:47:59.129335 1401996 command_runner.go:130] > {
	I0916 10:47:59.129361 1401996 command_runner.go:130] >   "images": [
	I0916 10:47:59.129366 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129376 1401996 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:47:59.129381 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129399 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:47:59.129405 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129410 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129425 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 10:47:59.129436 1401996 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:47:59.129442 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129447 1401996 command_runner.go:130] >       "size": "90295858",
	I0916 10:47:59.129454 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129458 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129468 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129476 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129479 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129482 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129488 1401996 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:47:59.129497 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129502 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:47:59.129509 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129513 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129525 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 10:47:59.129536 1401996 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:47:59.129542 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129555 1401996 command_runner.go:130] >       "size": "29037500",
	I0916 10:47:59.129569 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129573 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129579 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129583 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129591 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129594 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129601 1401996 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:47:59.129607 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129613 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:47:59.129619 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129623 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129635 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 10:47:59.129646 1401996 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:47:59.129649 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129653 1401996 command_runner.go:130] >       "size": "61647114",
	I0916 10:47:59.129659 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.129663 1401996 command_runner.go:130] >       "username": "nonroot",
	I0916 10:47:59.129669 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129673 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129679 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129682 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129689 1401996 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:47:59.129695 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129702 1401996 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:47:59.129708 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129712 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129723 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 10:47:59.129739 1401996 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 10:47:59.129746 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129750 1401996 command_runner.go:130] >       "size": "139912446",
	I0916 10:47:59.129754 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129760 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129764 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129775 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129782 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129787 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129793 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129796 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129803 1401996 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:47:59.129809 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129815 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:47:59.129821 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129825 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129838 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 10:47:59.129846 1401996 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 10:47:59.129853 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129857 1401996 command_runner.go:130] >       "size": "92632544",
	I0916 10:47:59.129864 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129868 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129875 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129879 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129886 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129890 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.129896 1401996 command_runner.go:130] >     },
	I0916 10:47:59.129900 1401996 command_runner.go:130] >     {
	I0916 10:47:59.129906 1401996 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:47:59.129912 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.129918 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:47:59.129922 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129928 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.129937 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 10:47:59.129949 1401996 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 10:47:59.129955 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.129959 1401996 command_runner.go:130] >       "size": "86930758",
	I0916 10:47:59.129966 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.129970 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.129981 1401996 command_runner.go:130] >       },
	I0916 10:47:59.129988 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.129992 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.129999 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130002 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130010 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130017 1401996 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:47:59.130021 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130026 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:47:59.130033 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130037 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130049 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 10:47:59.130060 1401996 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 10:47:59.130066 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130070 1401996 command_runner.go:130] >       "size": "95951255",
	I0916 10:47:59.130077 1401996 command_runner.go:130] >       "uid": null,
	I0916 10:47:59.130081 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130088 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130092 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130098 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130101 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130107 1401996 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:47:59.130114 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130120 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:47:59.130125 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130128 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130152 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 10:47:59.130164 1401996 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:47:59.130170 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130174 1401996 command_runner.go:130] >       "size": "67007814",
	I0916 10:47:59.130181 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.130185 1401996 command_runner.go:130] >         "value": "0"
	I0916 10:47:59.130191 1401996 command_runner.go:130] >       },
	I0916 10:47:59.130199 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130207 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130211 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130217 1401996 command_runner.go:130] >     },
	I0916 10:47:59.130221 1401996 command_runner.go:130] >     {
	I0916 10:47:59.130231 1401996 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:47:59.130238 1401996 command_runner.go:130] >       "repoTags": [
	I0916 10:47:59.130243 1401996 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:47:59.130249 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130253 1401996 command_runner.go:130] >       "repoDigests": [
	I0916 10:47:59.130264 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 10:47:59.130275 1401996 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:47:59.130281 1401996 command_runner.go:130] >       ],
	I0916 10:47:59.130285 1401996 command_runner.go:130] >       "size": "519877",
	I0916 10:47:59.130288 1401996 command_runner.go:130] >       "uid": {
	I0916 10:47:59.130292 1401996 command_runner.go:130] >         "value": "65535"
	I0916 10:47:59.130298 1401996 command_runner.go:130] >       },
	I0916 10:47:59.130302 1401996 command_runner.go:130] >       "username": "",
	I0916 10:47:59.130308 1401996 command_runner.go:130] >       "spec": null,
	I0916 10:47:59.130312 1401996 command_runner.go:130] >       "pinned": false
	I0916 10:47:59.130319 1401996 command_runner.go:130] >     }
	I0916 10:47:59.130322 1401996 command_runner.go:130] >   ]
	I0916 10:47:59.130325 1401996 command_runner.go:130] > }
	I0916 10:47:59.132975 1401996 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:47:59.133000 1401996 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:47:59.133067 1401996 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:47:59.175764 1401996 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:47:59.175833 1401996 cache_images.go:84] Images are preloaded, skipping loading
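The preload check parses the raw JSON emitted above; the same image list is easier to eyeball with jq (assumed available on the host):

	sudo crictl images --output json | jq -r '.images[].repoTags[]'
	# docker.io/kindest/kindnetd:v20240813-c6f155d6
	# gcr.io/k8s-minikube/storage-provisioner:v5
	# registry.k8s.io/coredns/coredns:v1.11.3
	# ...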
	I0916 10:47:59.175849 1401996 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 crio true true} ...
	I0916 10:47:59.175967 1401996 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=functional-919910 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
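The rendered [Unit]/[Service] fragment above is what minikube installs as a systemd drop-in for kubelet; the drop-in path itself is not shown in this log, but the effective unit, including the ExecStart flags above, can be inspected with:

	sudo systemctl cat kubelet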
	I0916 10:47:59.176058 1401996 ssh_runner.go:195] Run: crio config
	I0916 10:47:59.225080 1401996 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 10:47:59.225107 1401996 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 10:47:59.225116 1401996 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 10:47:59.225119 1401996 command_runner.go:130] > #
	I0916 10:47:59.225128 1401996 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 10:47:59.225135 1401996 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 10:47:59.225141 1401996 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 10:47:59.225148 1401996 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 10:47:59.225152 1401996 command_runner.go:130] > # reload'.
	I0916 10:47:59.225159 1401996 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 10:47:59.225166 1401996 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 10:47:59.225175 1401996 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 10:47:59.225181 1401996 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 10:47:59.225184 1401996 command_runner.go:130] > [crio]
	I0916 10:47:59.225190 1401996 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 10:47:59.225195 1401996 command_runner.go:130] > # containers images, in this directory.
	I0916 10:47:59.225857 1401996 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 10:47:59.225883 1401996 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 10:47:59.226449 1401996 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 10:47:59.226467 1401996 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 10:47:59.226480 1401996 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 10:47:59.227033 1401996 command_runner.go:130] > # storage_driver = "vfs"
	I0916 10:47:59.227049 1401996 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 10:47:59.227061 1401996 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 10:47:59.227342 1401996 command_runner.go:130] > # storage_option = [
	I0916 10:47:59.227631 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.227641 1401996 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 10:47:59.227648 1401996 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 10:47:59.228197 1401996 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 10:47:59.228220 1401996 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 10:47:59.228240 1401996 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 10:47:59.228249 1401996 command_runner.go:130] > # always happen on a node reboot
	I0916 10:47:59.228820 1401996 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 10:47:59.228838 1401996 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 10:47:59.228851 1401996 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 10:47:59.228874 1401996 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 10:47:59.229533 1401996 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 10:47:59.229573 1401996 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 10:47:59.229582 1401996 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 10:47:59.230129 1401996 command_runner.go:130] > # internal_wipe = true
	I0916 10:47:59.230145 1401996 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 10:47:59.230159 1401996 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 10:47:59.230168 1401996 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 10:47:59.230748 1401996 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 10:47:59.230766 1401996 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 10:47:59.230776 1401996 command_runner.go:130] > [crio.api]
	I0916 10:47:59.230784 1401996 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 10:47:59.231355 1401996 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 10:47:59.231372 1401996 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 10:47:59.231950 1401996 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 10:47:59.231967 1401996 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 10:47:59.231979 1401996 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 10:47:59.232535 1401996 command_runner.go:130] > # stream_port = "0"
	I0916 10:47:59.232551 1401996 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 10:47:59.233183 1401996 command_runner.go:130] > # stream_enable_tls = false
	I0916 10:47:59.233199 1401996 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 10:47:59.233627 1401996 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 10:47:59.233651 1401996 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 10:47:59.233660 1401996 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 10:47:59.233668 1401996 command_runner.go:130] > # minutes.
	I0916 10:47:59.234100 1401996 command_runner.go:130] > # stream_tls_cert = ""
	I0916 10:47:59.234125 1401996 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 10:47:59.234136 1401996 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 10:47:59.234567 1401996 command_runner.go:130] > # stream_tls_key = ""
	I0916 10:47:59.234591 1401996 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 10:47:59.234598 1401996 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 10:47:59.234604 1401996 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 10:47:59.235021 1401996 command_runner.go:130] > # stream_tls_ca = ""
	I0916 10:47:59.235039 1401996 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 10:47:59.235607 1401996 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 10:47:59.235633 1401996 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 10:47:59.236265 1401996 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
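The two 80 MiB limits above (83886080 = 80 * 1024 * 1024) matter to any gRPC client of this socket: a client whose own limits are lower can fail on large responses such as image listings. A hedged sketch, assuming the google.golang.org/grpc module, of dialing the CRI-O socket with matching call-size limits:

    package main

    import (
        "log"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
    )

    func main() {
        // Match the server-side limits shown in the config above.
        const maxMsgSize = 83886080
        conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
            grpc.WithTransportCredentials(insecure.NewCredentials()),
            grpc.WithDefaultCallOptions(
                grpc.MaxCallRecvMsgSize(maxMsgSize),
                grpc.MaxCallSendMsgSize(maxMsgSize),
            ),
        )
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()
        log.Println("client configured against", conn.Target())
    }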
	I0916 10:47:59.236299 1401996 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 10:47:59.236309 1401996 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 10:47:59.236313 1401996 command_runner.go:130] > [crio.runtime]
	I0916 10:47:59.236319 1401996 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 10:47:59.236328 1401996 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 10:47:59.236333 1401996 command_runner.go:130] > # "nofile=1024:2048"
	I0916 10:47:59.236341 1401996 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 10:47:59.236652 1401996 command_runner.go:130] > # default_ulimits = [
	I0916 10:47:59.236991 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.237014 1401996 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 10:47:59.237613 1401996 command_runner.go:130] > # no_pivot = false
	I0916 10:47:59.237635 1401996 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 10:47:59.237644 1401996 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 10:47:59.238256 1401996 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 10:47:59.238277 1401996 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 10:47:59.238284 1401996 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 10:47:59.238294 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 10:47:59.238790 1401996 command_runner.go:130] > # conmon = ""
	I0916 10:47:59.238805 1401996 command_runner.go:130] > # Cgroup setting for conmon
	I0916 10:47:59.238814 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 10:47:59.239157 1401996 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 10:47:59.239175 1401996 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 10:47:59.239181 1401996 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 10:47:59.239190 1401996 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 10:47:59.239495 1401996 command_runner.go:130] > # conmon_env = [
	I0916 10:47:59.239816 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.239837 1401996 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 10:47:59.239843 1401996 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 10:47:59.239849 1401996 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 10:47:59.240159 1401996 command_runner.go:130] > # default_env = [
	I0916 10:47:59.240541 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.240557 1401996 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 10:47:59.241047 1401996 command_runner.go:130] > # selinux = false
	I0916 10:47:59.241075 1401996 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 10:47:59.241083 1401996 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 10:47:59.241089 1401996 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 10:47:59.241093 1401996 command_runner.go:130] > # seccomp_profile = ""
	I0916 10:47:59.241105 1401996 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 10:47:59.241111 1401996 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 10:47:59.241117 1401996 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 10:47:59.241122 1401996 command_runner.go:130] > # which might increase security.
	I0916 10:47:59.241126 1401996 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 10:47:59.241133 1401996 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 10:47:59.241143 1401996 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 10:47:59.241149 1401996 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 10:47:59.241160 1401996 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 10:47:59.241166 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.241184 1401996 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 10:47:59.241191 1401996 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 10:47:59.241196 1401996 command_runner.go:130] > # the cgroup blockio controller.
	I0916 10:47:59.241200 1401996 command_runner.go:130] > # blockio_config_file = ""
	I0916 10:47:59.241207 1401996 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 10:47:59.241214 1401996 command_runner.go:130] > # irqbalance daemon.
	I0916 10:47:59.241219 1401996 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 10:47:59.241226 1401996 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 10:47:59.241235 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.241360 1401996 command_runner.go:130] > # rdt_config_file = ""
	I0916 10:47:59.241381 1401996 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 10:47:59.241391 1401996 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 10:47:59.241401 1401996 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 10:47:59.241507 1401996 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 10:47:59.241522 1401996 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 10:47:59.241530 1401996 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 10:47:59.241539 1401996 command_runner.go:130] > # will be added.
	I0916 10:47:59.241547 1401996 command_runner.go:130] > # default_capabilities = [
	I0916 10:47:59.241551 1401996 command_runner.go:130] > # 	"CHOWN",
	I0916 10:47:59.241555 1401996 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 10:47:59.241693 1401996 command_runner.go:130] > # 	"FSETID",
	I0916 10:47:59.241710 1401996 command_runner.go:130] > # 	"FOWNER",
	I0916 10:47:59.241715 1401996 command_runner.go:130] > # 	"SETGID",
	I0916 10:47:59.241718 1401996 command_runner.go:130] > # 	"SETUID",
	I0916 10:47:59.241721 1401996 command_runner.go:130] > # 	"SETPCAP",
	I0916 10:47:59.241725 1401996 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 10:47:59.241729 1401996 command_runner.go:130] > # 	"KILL",
	I0916 10:47:59.241734 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241743 1401996 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 10:47:59.241755 1401996 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 10:47:59.241760 1401996 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 10:47:59.241768 1401996 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 10:47:59.241777 1401996 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 10:47:59.241781 1401996 command_runner.go:130] > default_sysctls = [
	I0916 10:47:59.241889 1401996 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 10:47:59.241899 1401996 command_runner.go:130] > ]
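That default sysctl lets unprivileged container processes bind ports below 1024 (a value of 0 makes every port unprivileged). A quick stdlib Go check, meant to be run inside a container, to confirm the value took effect:

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    func main() {
        // Kernel view of the sysctl set by default_sysctls above.
        data, err := os.ReadFile("/proc/sys/net/ipv4/ip_unprivileged_port_start")
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        fmt.Printf("ip_unprivileged_port_start = %s\n", strings.TrimSpace(string(data)))
    }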
	I0916 10:47:59.241906 1401996 command_runner.go:130] > # List of devices on the host that a
	I0916 10:47:59.241918 1401996 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 10:47:59.241927 1401996 command_runner.go:130] > # allowed_devices = [
	I0916 10:47:59.241931 1401996 command_runner.go:130] > # 	"/dev/fuse",
	I0916 10:47:59.241934 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241939 1401996 command_runner.go:130] > # List of additional devices, specified as
	I0916 10:47:59.241965 1401996 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 10:47:59.241971 1401996 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 10:47:59.241980 1401996 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 10:47:59.241985 1401996 command_runner.go:130] > # additional_devices = [
	I0916 10:47:59.241988 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.241994 1401996 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 10:47:59.242001 1401996 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 10:47:59.242004 1401996 command_runner.go:130] > # 	"/etc/cdi",
	I0916 10:47:59.242009 1401996 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 10:47:59.242012 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.242018 1401996 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 10:47:59.242028 1401996 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking host's uid/gid.
	I0916 10:47:59.242032 1401996 command_runner.go:130] > # Defaults to false.
	I0916 10:47:59.242181 1401996 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 10:47:59.242203 1401996 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 10:47:59.242211 1401996 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 10:47:59.242214 1401996 command_runner.go:130] > # hooks_dir = [
	I0916 10:47:59.242218 1401996 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 10:47:59.242221 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.242234 1401996 command_runner.go:130] > # Path to the file specifying the defaults mounts for each container. The
	I0916 10:47:59.242245 1401996 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 10:47:59.242251 1401996 command_runner.go:130] > # its default mounts from the following two files:
	I0916 10:47:59.242254 1401996 command_runner.go:130] > #
	I0916 10:47:59.242260 1401996 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 10:47:59.242270 1401996 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 10:47:59.242276 1401996 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 10:47:59.242284 1401996 command_runner.go:130] > #
	I0916 10:47:59.242290 1401996 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 10:47:59.242297 1401996 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 10:47:59.242303 1401996 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 10:47:59.242308 1401996 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 10:47:59.242311 1401996 command_runner.go:130] > #
	I0916 10:47:59.242315 1401996 command_runner.go:130] > # default_mounts_file = ""
	I0916 10:47:59.242325 1401996 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 10:47:59.242332 1401996 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 10:47:59.242340 1401996 command_runner.go:130] > # pids_limit = 0
	I0916 10:47:59.242346 1401996 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 10:47:59.242352 1401996 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 10:47:59.242362 1401996 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 10:47:59.242370 1401996 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 10:47:59.242374 1401996 command_runner.go:130] > # log_size_max = -1
	I0916 10:47:59.242381 1401996 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 10:47:59.242496 1401996 command_runner.go:130] > # log_to_journald = false
	I0916 10:47:59.242510 1401996 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 10:47:59.242522 1401996 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 10:47:59.242538 1401996 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 10:47:59.242548 1401996 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 10:47:59.242554 1401996 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 10:47:59.242563 1401996 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 10:47:59.242568 1401996 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 10:47:59.242573 1401996 command_runner.go:130] > # read_only = false
	I0916 10:47:59.242587 1401996 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 10:47:59.242594 1401996 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 10:47:59.242598 1401996 command_runner.go:130] > # live configuration reload.
	I0916 10:47:59.242601 1401996 command_runner.go:130] > # log_level = "info"
	I0916 10:47:59.242607 1401996 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 10:47:59.242612 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.242620 1401996 command_runner.go:130] > # log_filter = ""
	I0916 10:47:59.242626 1401996 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 10:47:59.242632 1401996 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 10:47:59.242639 1401996 command_runner.go:130] > # separated by comma.
	I0916 10:47:59.242643 1401996 command_runner.go:130] > # uid_mappings = ""
	I0916 10:47:59.242649 1401996 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 10:47:59.242658 1401996 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 10:47:59.242662 1401996 command_runner.go:130] > # separated by comma.
	I0916 10:47:59.242794 1401996 command_runner.go:130] > # gid_mappings = ""
	I0916 10:47:59.242814 1401996 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 10:47:59.242822 1401996 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 10:47:59.242830 1401996 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 10:47:59.242838 1401996 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 10:47:59.242845 1401996 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 10:47:59.242851 1401996 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 10:47:59.242857 1401996 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 10:47:59.242861 1401996 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 10:47:59.242868 1401996 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 10:47:59.242884 1401996 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 10:47:59.242895 1401996 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 10:47:59.242899 1401996 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 10:47:59.242905 1401996 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 10:47:59.242914 1401996 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 10:47:59.242919 1401996 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 10:47:59.242924 1401996 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 10:47:59.242927 1401996 command_runner.go:130] > # drop_infra_ctr = true
	I0916 10:47:59.242933 1401996 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 10:47:59.242939 1401996 command_runner.go:130] > # You can use linux CPU list format to specify desired CPUs.
	I0916 10:47:59.242950 1401996 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 10:47:59.242954 1401996 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 10:47:59.242965 1401996 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 10:47:59.242972 1401996 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 10:47:59.243104 1401996 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 10:47:59.243144 1401996 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 10:47:59.243153 1401996 command_runner.go:130] > # pinns_path = ""
	I0916 10:47:59.243161 1401996 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 10:47:59.243172 1401996 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 10:47:59.243179 1401996 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 10:47:59.243183 1401996 command_runner.go:130] > # default_runtime = "runc"
	I0916 10:47:59.243188 1401996 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 10:47:59.243196 1401996 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior being created as a directory).
	I0916 10:47:59.243211 1401996 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 10:47:59.243220 1401996 command_runner.go:130] > # creation as a file is not desired either.
	I0916 10:47:59.243230 1401996 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 10:47:59.243237 1401996 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 10:47:59.243248 1401996 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 10:47:59.243254 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.243261 1401996 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 10:47:59.243267 1401996 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 10:47:59.243274 1401996 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 10:47:59.243280 1401996 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 10:47:59.243286 1401996 command_runner.go:130] > #
	I0916 10:47:59.243291 1401996 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 10:47:59.243296 1401996 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 10:47:59.243304 1401996 command_runner.go:130] > #  runtime_type = "oci"
	I0916 10:47:59.243308 1401996 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 10:47:59.243313 1401996 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 10:47:59.243322 1401996 command_runner.go:130] > #  allowed_annotations = []
	I0916 10:47:59.243325 1401996 command_runner.go:130] > # Where:
	I0916 10:47:59.243331 1401996 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 10:47:59.243337 1401996 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 10:47:59.243344 1401996 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 10:47:59.243350 1401996 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 10:47:59.243354 1401996 command_runner.go:130] > #   in $PATH.
	I0916 10:47:59.243361 1401996 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 10:47:59.243369 1401996 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 10:47:59.243380 1401996 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 10:47:59.243387 1401996 command_runner.go:130] > #   state.
	I0916 10:47:59.243395 1401996 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 10:47:59.243406 1401996 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 10:47:59.243412 1401996 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 10:47:59.243418 1401996 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 10:47:59.243424 1401996 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 10:47:59.243431 1401996 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 10:47:59.243435 1401996 command_runner.go:130] > #   The currently recognized values are:
	I0916 10:47:59.243446 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 10:47:59.243453 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 10:47:59.243463 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 10:47:59.243471 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 10:47:59.243482 1401996 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 10:47:59.243489 1401996 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 10:47:59.243498 1401996 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 10:47:59.243505 1401996 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 10:47:59.243510 1401996 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 10:47:59.243514 1401996 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 10:47:59.243519 1401996 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 10:47:59.243523 1401996 command_runner.go:130] > runtime_type = "oci"
	I0916 10:47:59.243685 1401996 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 10:47:59.243698 1401996 command_runner.go:130] > runtime_config_path = ""
	I0916 10:47:59.243702 1401996 command_runner.go:130] > monitor_path = ""
	I0916 10:47:59.243705 1401996 command_runner.go:130] > monitor_cgroup = ""
	I0916 10:47:59.243709 1401996 command_runner.go:130] > monitor_exec_cgroup = ""
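CRI-O validates runtime handlers like the runc entry above when it loads this table. A small stdlib Go sanity check in the same spirit, using the runtime_path from the config (a sketch only, not CRI-O's actual validation):

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        // runtime_path from [crio.runtime.runtimes.runc] above.
        const runtimePath = "/usr/lib/cri-o-runc/sbin/runc"
        info, err := os.Stat(runtimePath)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        if info.Mode()&0o111 == 0 {
            fmt.Fprintln(os.Stderr, runtimePath, "is not executable")
            os.Exit(1)
        }
        fmt.Println(runtimePath, "looks usable")
    }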
	I0916 10:47:59.243739 1401996 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 10:47:59.243748 1401996 command_runner.go:130] > # running containers
	I0916 10:47:59.243753 1401996 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 10:47:59.243760 1401996 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 10:47:59.243770 1401996 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 10:47:59.243776 1401996 command_runner.go:130] > # surface and mitigating the consequences of containers breakout.
	I0916 10:47:59.243781 1401996 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 10:47:59.243786 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 10:47:59.243790 1401996 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 10:47:59.243795 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 10:47:59.243800 1401996 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 10:47:59.243811 1401996 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 10:47:59.243817 1401996 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 10:47:59.243826 1401996 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 10:47:59.243837 1401996 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 10:47:59.243845 1401996 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and set of resources it supports mutating.
	I0916 10:47:59.243857 1401996 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 10:47:59.243863 1401996 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 10:47:59.243874 1401996 command_runner.go:130] > # For a container to opt into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 10:47:59.243883 1401996 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 10:47:59.243893 1401996 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 10:47:59.243900 1401996 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 10:47:59.243908 1401996 command_runner.go:130] > # Example:
	I0916 10:47:59.243912 1401996 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 10:47:59.243918 1401996 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 10:47:59.243927 1401996 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 10:47:59.243932 1401996 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 10:47:59.243940 1401996 command_runner.go:130] > # cpuset = 0
	I0916 10:47:59.243944 1401996 command_runner.go:130] > # cpushares = "0-1"
	I0916 10:47:59.243947 1401996 command_runner.go:130] > # Where:
	I0916 10:47:59.243951 1401996 command_runner.go:130] > # The workload name is workload-type.
	I0916 10:47:59.243959 1401996 command_runner.go:130] > # To specify, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 10:47:59.243964 1401996 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 10:47:59.243971 1401996 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 10:47:59.243982 1401996 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 10:47:59.243989 1401996 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 10:47:59.243995 1401996 command_runner.go:130] > # 
	I0916 10:47:59.244001 1401996 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 10:47:59.244005 1401996 command_runner.go:130] > #
	I0916 10:47:59.244012 1401996 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 10:47:59.244022 1401996 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 10:47:59.244029 1401996 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 10:47:59.244035 1401996 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 10:47:59.244041 1401996 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 10:47:59.244049 1401996 command_runner.go:130] > [crio.image]
	I0916 10:47:59.244055 1401996 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 10:47:59.244059 1401996 command_runner.go:130] > # default_transport = "docker://"
	I0916 10:47:59.244070 1401996 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 10:47:59.244079 1401996 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 10:47:59.244084 1401996 command_runner.go:130] > # global_auth_file = ""
	I0916 10:47:59.244089 1401996 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 10:47:59.244098 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.244103 1401996 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 10:47:59.244109 1401996 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 10:47:59.244115 1401996 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 10:47:59.244127 1401996 command_runner.go:130] > # This option supports live configuration reload.
	I0916 10:47:59.244134 1401996 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 10:47:59.244141 1401996 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 10:47:59.244147 1401996 command_runner.go:130] > # When explicitly set to "", it will fall back to the entrypoint and command
	I0916 10:47:59.244157 1401996 command_runner.go:130] > # specified in the pause image. When commented out, it will fall back to the
	I0916 10:47:59.244163 1401996 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 10:47:59.244171 1401996 command_runner.go:130] > # pause_command = "/pause"
	I0916 10:47:59.244178 1401996 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 10:47:59.244191 1401996 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 10:47:59.244198 1401996 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 10:47:59.244205 1401996 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 10:47:59.244210 1401996 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 10:47:59.244344 1401996 command_runner.go:130] > # signature_policy = ""
	I0916 10:47:59.244374 1401996 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 10:47:59.244384 1401996 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 10:47:59.244394 1401996 command_runner.go:130] > # changing them here.
	I0916 10:47:59.244399 1401996 command_runner.go:130] > # insecure_registries = [
	I0916 10:47:59.244403 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.244409 1401996 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 10:47:59.244418 1401996 command_runner.go:130] > # ignore; the latter will ignore volumes entirely.
	I0916 10:47:59.244422 1401996 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 10:47:59.244428 1401996 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 10:47:59.244432 1401996 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 10:47:59.244439 1401996 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 10:47:59.244443 1401996 command_runner.go:130] > # CNI plugins.
	I0916 10:47:59.244447 1401996 command_runner.go:130] > [crio.network]
	I0916 10:47:59.244453 1401996 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 10:47:59.244458 1401996 command_runner.go:130] > # CRI-O will pick up the first one found in network_dir.
	I0916 10:47:59.244467 1401996 command_runner.go:130] > # cni_default_network = ""
	I0916 10:47:59.244474 1401996 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 10:47:59.244483 1401996 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 10:47:59.244493 1401996 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 10:47:59.244496 1401996 command_runner.go:130] > # plugin_dirs = [
	I0916 10:47:59.244500 1401996 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 10:47:59.244505 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.244516 1401996 command_runner.go:130] > # A necessary configuration for Prometheus based metrics retrieval
	I0916 10:47:59.244523 1401996 command_runner.go:130] > [crio.metrics]
	I0916 10:47:59.244529 1401996 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 10:47:59.244533 1401996 command_runner.go:130] > # enable_metrics = false
	I0916 10:47:59.244538 1401996 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 10:47:59.244542 1401996 command_runner.go:130] > # Per default all metrics are enabled.
	I0916 10:47:59.244553 1401996 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 10:47:59.244560 1401996 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 10:47:59.244569 1401996 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 10:47:59.244747 1401996 command_runner.go:130] > # metrics_collectors = [
	I0916 10:47:59.244757 1401996 command_runner.go:130] > # 	"operations",
	I0916 10:47:59.244762 1401996 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 10:47:59.244766 1401996 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 10:47:59.244770 1401996 command_runner.go:130] > # 	"operations_errors",
	I0916 10:47:59.244775 1401996 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 10:47:59.244779 1401996 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 10:47:59.244793 1401996 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 10:47:59.244798 1401996 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 10:47:59.244801 1401996 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 10:47:59.244805 1401996 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 10:47:59.244809 1401996 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 10:47:59.244813 1401996 command_runner.go:130] > # 	"containers_oom_total",
	I0916 10:47:59.244819 1401996 command_runner.go:130] > # 	"containers_oom",
	I0916 10:47:59.244823 1401996 command_runner.go:130] > # 	"processes_defunct",
	I0916 10:47:59.244827 1401996 command_runner.go:130] > # 	"operations_total",
	I0916 10:47:59.244831 1401996 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 10:47:59.244835 1401996 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 10:47:59.244839 1401996 command_runner.go:130] > # 	"operations_errors_total",
	I0916 10:47:59.246637 1401996 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 10:47:59.246654 1401996 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 10:47:59.246658 1401996 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 10:47:59.246663 1401996 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 10:47:59.246667 1401996 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 10:47:59.246671 1401996 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 10:47:59.246674 1401996 command_runner.go:130] > # ]
	I0916 10:47:59.246681 1401996 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 10:47:59.246685 1401996 command_runner.go:130] > # metrics_port = 9090
	I0916 10:47:59.246691 1401996 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 10:47:59.246702 1401996 command_runner.go:130] > # metrics_socket = ""
	I0916 10:47:59.246707 1401996 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 10:47:59.246714 1401996 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 10:47:59.246747 1401996 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 10:47:59.246756 1401996 command_runner.go:130] > # certificate on any modification event.
	I0916 10:47:59.246761 1401996 command_runner.go:130] > # metrics_cert = ""
	I0916 10:47:59.246766 1401996 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 10:47:59.246771 1401996 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 10:47:59.246941 1401996 command_runner.go:130] > # metrics_key = ""
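With the defaults shown here the metrics server, once enable_metrics is turned on, serves plain HTTP on metrics_port 9090 and exposes the collectors listed above. A minimal stdlib Go sketch of scraping it (assumes metrics were enabled; host, port, and path are the defaults from this dump):

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "os"
    )

    func main() {
        resp, err := http.Get("http://127.0.0.1:9090/metrics")
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        defer resp.Body.Close()
        // Dumps Prometheus text format, e.g. crio_operations,
        // image_pulls_bytes_total, containers_oom_count_total.
        io.Copy(os.Stdout, resp.Body)
    }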
	I0916 10:47:59.246963 1401996 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 10:47:59.246968 1401996 command_runner.go:130] > [crio.tracing]
	I0916 10:47:59.246974 1401996 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 10:47:59.246978 1401996 command_runner.go:130] > # enable_tracing = false
	I0916 10:47:59.246983 1401996 command_runner.go:130] > # Address on which the gRPC trace collector listens on.
	I0916 10:47:59.246987 1401996 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 10:47:59.246993 1401996 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 10:47:59.246998 1401996 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 10:47:59.247004 1401996 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 10:47:59.247008 1401996 command_runner.go:130] > [crio.stats]
	I0916 10:47:59.247014 1401996 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 10:47:59.247023 1401996 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 10:47:59.247027 1401996 command_runner.go:130] > # stats_collection_period = 0
	I0916 10:47:59.247220 1401996 command_runner.go:130] ! time="2024-09-16 10:47:59.222448532Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 10:47:59.247241 1401996 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
	I0916 10:47:59.247293 1401996 cni.go:84] Creating CNI manager for ""
	I0916 10:47:59.247313 1401996 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:47:59.247323 1401996 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:47:59.247348 1401996 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-919910 NodeName:functional-919910 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:47:59.247499 1401996 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "functional-919910"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
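Configs like the dump above are generated from Go text/template files filled in with the kubeadm options logged before it. The following is an illustrative stdlib sketch of that pattern; the struct and template here are hypothetical stand-ins, not minikube's actual ones.

    package main

    import (
        "os"
        "text/template"
    )

    // Illustrative subset of the values seen in the kubeadm options above.
    type config struct {
        NodeName         string
        AdvertiseAddress string
        BindPort         int
    }

    const tmpl = `apiVersion: kubeadm.k8s.io/v1beta3
    kind: InitConfiguration
    localAPIEndpoint:
      advertiseAddress: {{.AdvertiseAddress}}
      bindPort: {{.BindPort}}
    nodeRegistration:
      criSocket: unix:///var/run/crio/crio.sock
      name: "{{.NodeName}}"
    `

    func main() {
        t := template.Must(template.New("kubeadm").Parse(tmpl))
        if err := t.Execute(os.Stdout, config{
            NodeName:         "functional-919910",
            AdvertiseAddress: "192.168.49.2",
            BindPort:         8441,
        }); err != nil {
            panic(err)
        }
    }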
	I0916 10:47:59.247580 1401996 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:47:59.255418 1401996 command_runner.go:130] > kubeadm
	I0916 10:47:59.255435 1401996 command_runner.go:130] > kubectl
	I0916 10:47:59.255511 1401996 command_runner.go:130] > kubelet
	I0916 10:47:59.256537 1401996 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:47:59.256602 1401996 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:47:59.265347 1401996 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (367 bytes)
	I0916 10:47:59.284157 1401996 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:47:59.302791 1401996 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2155 bytes)
	I0916 10:47:59.321595 1401996 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:47:59.325044 1401996 command_runner.go:130] > 192.168.49.2	control-plane.minikube.internal
	I0916 10:47:59.325415 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:47:59.441461 1401996 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:47:59.454512 1401996 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910 for IP: 192.168.49.2
	I0916 10:47:59.454535 1401996 certs.go:194] generating shared ca certs ...
	I0916 10:47:59.454551 1401996 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.454697 1401996 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:47:59.454744 1401996 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:47:59.454756 1401996 certs.go:256] generating profile certs ...
	I0916 10:47:59.454848 1401996 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key
	I0916 10:47:59.454922 1401996 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key.debd5ef9
	I0916 10:47:59.454972 1401996 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key
	I0916 10:47:59.454984 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:47:59.454999 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:47:59.455011 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:47:59.455026 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:47:59.455037 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:47:59.455054 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:47:59.455066 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:47:59.455081 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:47:59.455140 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:47:59.455172 1401996 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:47:59.455184 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:47:59.455210 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:47:59.455242 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:47:59.455268 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:47:59.455313 1401996 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:47:59.455344 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.455360 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.455373 1401996 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.456003 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:47:59.482482 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:47:59.508357 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:47:59.533138 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:47:59.558621 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:47:59.583104 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:47:59.607676 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:47:59.632251 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:47:59.656249 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:47:59.682129 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:47:59.707402 1401996 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:47:59.732945 1401996 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:47:59.751902 1401996 ssh_runner.go:195] Run: openssl version
	I0916 10:47:59.757177 1401996 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 10:47:59.757641 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:47:59.767361 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771009 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771054 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.771108 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:47:59.779053 1401996 command_runner.go:130] > 3ec20f2e
	I0916 10:47:59.779521 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:47:59.788786 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:47:59.798081 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801713 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801753 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.801812 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:47:59.808360 1401996 command_runner.go:130] > b5213941
	I0916 10:47:59.808860 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:47:59.818410 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:47:59.828438 1401996 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832025 1401996 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832064 1401996 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.832115 1401996 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:47:59.838985 1401996 command_runner.go:130] > 51391683
	I0916 10:47:59.839071 1401996 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
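
	Each openssl x509 -hash call above prints the certificate's subject hash (3ec20f2e, b5213941, 51391683), and the following ln -fs publishes the certificate under /etc/ssl/certs/<hash>.0, the CApath naming scheme OpenSSL uses to look up trusted CAs. A minimal Go sketch of the same hash-and-symlink step, assuming the openssl binary is on PATH and the process may write to /etc/ssl/certs:

	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"path/filepath"
		"strings"
	)

	// installTrusted computes the subject hash via openssl and links the
	// certificate into the CApath directory under <hash>.0.
	func installTrusted(certPath string) error {
		out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
		if err != nil {
			return err
		}
		hash := strings.TrimSpace(string(out)) // e.g. "b5213941" for minikubeCA.pem above
		link := filepath.Join("/etc/ssl/certs", hash+".0")
		_ = os.Remove(link) // ln -fs equivalent: replace any existing link
		return os.Symlink(certPath, link)
	}

	func main() {
		if err := installTrusted("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
			fmt.Println(err)
		}
	}
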
	I0916 10:47:59.848212 1401996 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:47:59.851764 1401996 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:47:59.851830 1401996 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 10:47:59.851846 1401996 command_runner.go:130] > Device: 10301h/66305d	Inode: 1308756     Links: 1
	I0916 10:47:59.851853 1401996 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:47:59.851859 1401996 command_runner.go:130] > Access: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851865 1401996 command_runner.go:130] > Modify: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851869 1401996 command_runner.go:130] > Change: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851874 1401996 command_runner.go:130] >  Birth: 2024-09-16 10:46:46.002004725 +0000
	I0916 10:47:59.851952 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:47:59.858501 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.858963 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:47:59.865688 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.866098 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:47:59.872474 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.872964 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:47:59.879512 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.879952 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:47:59.886569 1401996 command_runner.go:130] > Certificate will not expire
	I0916 10:47:59.887097 1401996 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:47:59.893931 1401996 command_runner.go:130] > Certificate will not expire
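
	The -checkend 86400 invocations above ask openssl whether each certificate expires within the next 24 hours; "Certificate will not expire" means it does not. The same check can be done natively with Go's crypto/x509; a sketch, assuming each file holds a single PEM-encoded certificate:

	package main

	import (
		"crypto/x509"
		"encoding/pem"
		"fmt"
		"os"
		"time"
	)

	// willExpireWithin reports whether the certificate in the PEM file expires
	// within d, mirroring `openssl x509 -checkend` (86400s = 24h above).
	func willExpireWithin(path string, d time.Duration) (bool, error) {
		data, err := os.ReadFile(path)
		if err != nil {
			return false, err
		}
		block, _ := pem.Decode(data)
		if block == nil {
			return false, fmt.Errorf("no PEM block in %s", path)
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			return false, err
		}
		return time.Now().Add(d).After(cert.NotAfter), nil
	}

	func main() {
		expiring, err := willExpireWithin("/var/lib/minikube/certs/front-proxy-client.crt", 24*time.Hour)
		if err != nil {
			fmt.Println(err)
			return
		}
		if !expiring {
			fmt.Println("Certificate will not expire") // matches the log output above
		}
	}
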
	I0916 10:47:59.894042 1401996 kubeadm.go:392] StartCluster: {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
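
	The StartCluster line prints minikube's full cluster configuration struct. For orientation, a trimmed, hypothetical mirror of a few of the printed fields (field names and values taken from the dump; the real definition in minikube's config package is much larger):

	package main

	import "fmt"

	// KubernetesConfig and ClusterConfig are illustrative subsets only.
	type KubernetesConfig struct {
		KubernetesVersion string
		ClusterName       string
		ContainerRuntime  string
		ServiceCIDR       string
	}

	type ClusterConfig struct {
		Name             string
		Driver           string
		Memory           int // MB
		CPUs             int
		KubernetesConfig KubernetesConfig
	}

	func main() {
		cc := ClusterConfig{
			Name:   "functional-919910",
			Driver: "docker",
			Memory: 4000,
			CPUs:   2,
			KubernetesConfig: KubernetesConfig{
				KubernetesVersion: "v1.31.1",
				ClusterName:       "functional-919910",
				ContainerRuntime:  "crio",
				ServiceCIDR:       "10.96.0.0/12",
			},
		}
		fmt.Printf("StartCluster: %+v\n", cc)
	}
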
	I0916 10:47:59.894136 1401996 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:47:59.894206 1401996 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:47:59.928977 1401996 command_runner.go:130] > 89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e
	I0916 10:47:59.929057 1401996 command_runner.go:130] > 584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf
	I0916 10:47:59.929099 1401996 command_runner.go:130] > 9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5
	I0916 10:47:59.929123 1401996 command_runner.go:130] > 3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9
	I0916 10:47:59.929144 1401996 command_runner.go:130] > 6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49
	I0916 10:47:59.929179 1401996 command_runner.go:130] > 19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced
	I0916 10:47:59.929200 1401996 command_runner.go:130] > b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093
	I0916 10:47:59.929246 1401996 command_runner.go:130] > 790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75
	I0916 10:47:59.931789 1401996 cri.go:89] found id: "89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	I0916 10:47:59.931812 1401996 cri.go:89] found id: "584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	I0916 10:47:59.931818 1401996 cri.go:89] found id: "9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	I0916 10:47:59.931822 1401996 cri.go:89] found id: "3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	I0916 10:47:59.931825 1401996 cri.go:89] found id: "6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49"
	I0916 10:47:59.931829 1401996 cri.go:89] found id: "19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced"
	I0916 10:47:59.931833 1401996 cri.go:89] found id: "b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093"
	I0916 10:47:59.931836 1401996 cri.go:89] found id: "790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75"
	I0916 10:47:59.931839 1401996 cri.go:89] found id: ""
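
	Before acting on the cluster, the runner inventories kube-system containers: crictl ps -a --quiet with a pod-namespace label filter returns one container ID per line, and each non-empty line became one of the "found id" entries above (the trailing empty entry marks the end of the list). A sketch of that list-and-parse step, assuming crictl is installed and sudo is non-interactive:

	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	// listKubeSystemContainers mirrors the crictl invocation above: all
	// containers (-a), IDs only (--quiet), filtered by pod namespace label.
	func listKubeSystemContainers() ([]string, error) {
		out, err := exec.Command("sudo", "crictl", "ps", "-a", "--quiet",
			"--label", "io.kubernetes.pod.namespace=kube-system").Output()
		if err != nil {
			return nil, err
		}
		var ids []string
		for _, line := range strings.Split(strings.TrimSpace(string(out)), "\n") {
			if line != "" {
				ids = append(ids, line)
			}
		}
		return ids, nil
	}

	func main() {
		ids, err := listKubeSystemContainers()
		if err != nil {
			fmt.Println(err)
			return
		}
		for _, id := range ids {
			fmt.Println("found id:", id) // matches the cri.go lines above
		}
	}
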
	I0916 10:47:59.931892 1401996 ssh_runner.go:195] Run: sudo runc list -f json
	I0916 10:47:59.955445 1401996 command_runner.go:130] > [{"ociVersion":"1.0.2-dev","id":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced/userdata","rootfs":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","created":"2024-09-16T10:46:54.451546712Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"d1900d79","io.kubernetes.container.name":"kube-controller-manager","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"d1900d79\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.conta
iner.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.388751196Z","io.kubernetes.cri-o.Image":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri-o.ImageRef":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-controller-manager\",\"io.kubernetes.pod.name\":\"kube-controller-manager-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"bcfd044776fa163108ac9ce9912dd1b1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-controller-manager-functional-919910_bcfd044776fa163108ac9ce9912dd1b1/kube-controller-manager/0.log","
io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-controller-manager\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","io.kubernetes.cri-o.Name":"k8s_kube-controller-manager_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae","io.kubernetes.cri-o.SandboxName":"k8s_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"
readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/containers/kube-controller-manager/2c567ce7\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/controller-manager.conf\",\"host_path\":\"/etc/kubernetes/controller-manager.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/min
ikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"host_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-controller-manager-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802316924Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay
-containers/3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9/userdata","rootfs":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","created":"2024-09-16T10:47:07.149526142Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"e80daca3","io.kubernetes.container.name":"kindnet-cni","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"e80daca3\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","io.kubernetes.cri-o.ContainerType":"container"
,"io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.08917364Z","io.kubernetes.cri-o.Image":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.ImageName":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri-o.ImageRef":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kindnet-cni\",\"io.kubernetes.pod.name\":\"kindnet-nb5xl\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"1282e172-7d16-4f24-9f7d-33da705832a9\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kindnet-nb5xl_1282e172-7d16-4f24-9f7d-33da705832a9/kindnet-cni/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kindnet-cni\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","io.kubernetes.cri-o.Name":"k8s_kindnet-cni_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.ku
bernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f","io.kubernetes.cri-o.SandboxName":"k8s_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path
\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/containers/kindnet-cni/4675b3f6\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/cni/net.d\",\"host_path\":\"/etc/cni/net.d\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/volumes/kubernetes.io~projected/kube-api-access-bxwpg\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kindnet-nb5xl","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"1282e172-7d16-4f24-9f7d-33da705832a9","kubernetes.io/config.seen":"2024-09-16T10:47:06.101213303Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf","pid":0,"stat
us":"stopped","bundle":"/run/containers/storage/overlay-containers/584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf/userdata","rootfs":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","created":"2024-09-16T10:47:48.333720036Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"6c6bf961","io.kubernetes.container.name":"storage-provisioner","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"6c6bf961\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad
35c776bd065faf","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.29216211Z","io.kubernetes.cri-o.Image":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.ImageName":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri-o.ImageRef":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"storage-provisioner\",\"io.kubernetes.pod.name\":\"storage-provisioner\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2eb6523f-f61a-4c33-8e91-0bbbb874554b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_storage-provisioner_2eb6523f-f61a-4c33-8e91-0bbbb874554b/storage-provisioner/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"storage-provisioner\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","io.kubernetes.cri-o
.Name":"k8s_storage-provisioner_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a","io.kubernetes.cri-o.SandboxName":"k8s_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/tmp\",\"host_path\":\"/tmp\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\"
:\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/containers/storage-provisioner/a9710de8\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/volumes/kubernetes.io~projected/kube-api-access-kn9qz\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"storage-provisioner","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2eb6523f-f61a-4c33-8e91-0bbbb874554b","kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Pod\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"integration-test\":\"storage-provisioner\"},\"name\":\"storage-provisioner\",\"namespace\":\"kube-system\"},\"spec\":{\"containers\":[{\"command\":[\"/storage-provisioner\"],\"image\":\"gcr.io/k8s-mi
nikube/storage-provisioner:v5\",\"imagePullPolicy\":\"IfNotPresent\",\"name\":\"storage-provisioner\",\"volumeMounts\":[{\"mountPath\":\"/tmp\",\"name\":\"tmp\"}]}],\"hostNetwork\":true,\"serviceAccountName\":\"storage-provisioner\",\"volumes\":[{\"hostPath\":{\"path\":\"/tmp\",\"type\":\"Directory\"},\"name\":\"tmp\"}]}}\n","kubernetes.io/config.seen":"2024-09-16T10:47:47.935314547Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49/userdata","rootfs":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","created":"2024-09-16T10:46:54.47745643Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"12faacf7","io.kubernetes.container.name":"kube-scheduler","io.kubernetes.container.restartC
ount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"12faacf7\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.420996546Z","io.kubernetes.cri-o.Image":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri-o.ImageRef":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-scheduler\",\"
io.kubernetes.pod.name\":\"kube-scheduler-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"60f2072c6865fb71ef7928175ceb3dad\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-scheduler-functional-919910_60f2072c6865fb71ef7928175ceb3dad/kube-scheduler/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-scheduler\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","io.kubernetes.cri-o.Name":"k8s_kube-scheduler_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3dad_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb","io.kubernetes.cri-o.SandboxName":"k8s_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef79281
75ceb3dad_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/containers/kube-scheduler/e278c329\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/scheduler.conf\",\"host_path\":\"/etc/kubernetes/scheduler.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-scheduler-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.hash":"60f2072c6865fb71e
f7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:46:53.802318072Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75/userdata","rootfs":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","created":"2024-09-16T10:46:54.415982086Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"7df2713b","io.kubernetes.container.name":"kube-apiserver","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"7df2713b\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMe
ssagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.357706151Z","io.kubernetes.cri-o.Image":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri-o.ImageRef":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-apiserver\",\"io.kubernetes.pod.name\":\"kube-apiserver-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3d8a6ba31c18f33c5660170029e5cde1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-apiserver-functional-919910_3d8a6ba31c18f33c5660170029e5cde1/kube-apise
rver/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-apiserver\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","io.kubernetes.cri-o.Name":"k8s_kube-apiserver_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1","io.kubernetes.cri-o.SandboxName":"k8s_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029
e5cde1/containers/kube-apiserver/e14b8c41\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\
":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-apiserver-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3d8a6ba31c18f33c5660170029e5cde1","kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802315340Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e/userdata","rootfs":"/var/lib/containers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","created":"2024-09-16T10:47:48.353491936Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"2a3a204d","io.kubernete
s.container.name":"coredns","io.kubernetes.container.ports":"[{\"name\":\"dns\",\"containerPort\":53,\"protocol\":\"UDP\"},{\"name\":\"dns-tcp\",\"containerPort\":53,\"protocol\":\"TCP\"},{\"name\":\"metrics\",\"containerPort\":9153,\"protocol\":\"TCP\"}]","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"2a3a204d\",\"io.kubernetes.container.ports\":\"[{\\\"name\\\":\\\"dns\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"UDP\\\"},{\\\"name\\\":\\\"dns-tcp\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"TCP\\\"},{\\\"name\\\":\\\"metrics\\\",\\\"containerPort\\\":9153,\\\"protocol\\\":\\\"TCP\\\"}]\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGra
cePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.313875121Z","io.kubernetes.cri-o.IP.0":"10.244.0.2","io.kubernetes.cri-o.Image":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.ImageName":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri-o.ImageRef":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"coredns\",\"io.kubernetes.pod.name\":\"coredns-7c65d6cfc9-qzn8c\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"ada36fb7-8486-4afc-9bef-04ab2e65fc7b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_coredns-7c65d6cfc9-qzn8c_ada36fb7-8486-4afc-9bef-04ab2e65fc7b/coredns/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"coredns\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/con
tainers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","io.kubernetes.cri-o.Name":"k8s_coredns_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b","io.kubernetes.cri-o.SandboxName":"k8s_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/coredns\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~configmap/config-volume\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"
/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/containers/coredns/4fbb99bf\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~projected/kube-api-access-lfgrj\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"coredns-7c65d6cfc9-qzn8c","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","kubernetes.io/config.seen":"2024-09-16T10:47:47.928368173Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"9fdab793eb970a5f01845e2aeaf
1389846fd7113bbdedbb122c9c796017271d5","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5/userdata","rootfs":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","created":"2024-09-16T10:47:07.30034468Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"159dcc59","io.kubernetes.container.name":"kube-proxy","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"159dcc59\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"9fdab793
eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.113935925Z","io.kubernetes.cri-o.Image":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri-o.ImageRef":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-proxy\",\"io.kubernetes.pod.name\":\"kube-proxy-nvpzv\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2e1bfc3e-dea3-4511-a154-e367e28b0898\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-proxy-nvpzv_2e1bfc3e-dea3-4511-a154-e367e28b0898/kube-proxy/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-proxy\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","io.kubernetes.cri
-o.Name":"k8s_kube-proxy_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49","io.kubernetes.cri-o.SandboxName":"k8s_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e36
7e28b0898/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/containers/kube-proxy/8c6823e0\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/kube-proxy\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~configmap/kube-proxy\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~projected/kube-api-access-4b6t8\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-proxy-nvpzv","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","kubernetes.io/config.seen":"
2024-09-16T10:47:06.101265018Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093/userdata","rootfs":"/var/lib/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","created":"2024-09-16T10:46:54.468809185Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"cdf7d3fa","io.kubernetes.container.name":"etcd","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"cdf7d3fa\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.cont
ainer.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.371518695Z","io.kubernetes.cri-o.Image":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.ImageName":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri-o.ImageRef":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"etcd\",\"io.kubernetes.pod.name\":\"etcd-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3e910b182a705a484fdc6733177892d1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_etcd-functional-919910_3e910b182a705a484fdc6733177892d1/etcd/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"etcd\"}","io.kubernetes.cri-o.MountPoint":"/var/li
b/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","io.kubernetes.cri-o.Name":"k8s_etcd_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee","io.kubernetes.cri-o.SandboxName":"k8s_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3e910b182a705a484fdc6733177892d1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kub
elet/pods/3e910b182a705a484fdc6733177892d1/containers/etcd/840357dc\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/etcd\",\"host_path\":\"/var/lib/minikube/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs/etcd\",\"host_path\":\"/var/lib/minikube/certs/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"etcd-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3e910b182a705a484fdc6733177892d1","kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"owner":"root"}]
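
	The runc list -f json dump above (logged a second time by cri.go just below) carries the full OCI state of each container: id, status, bundle and rootfs paths, creation time, and the CRI-O annotations. A sketch of consuming that output in Go, with a struct limited to the keys visible in the dump:

	package main

	import (
		"encoding/json"
		"fmt"
		"os/exec"
	)

	// runcContainer holds the fields visible in the `runc list -f json`
	// output above; the JSON tags match the keys emitted there.
	type runcContainer struct {
		OCIVersion  string            `json:"ociVersion"`
		ID          string            `json:"id"`
		PID         int               `json:"pid"`
		Status      string            `json:"status"`
		Bundle      string            `json:"bundle"`
		Rootfs      string            `json:"rootfs"`
		Created     string            `json:"created"`
		Annotations map[string]string `json:"annotations"`
		Owner       string            `json:"owner"`
	}

	func main() {
		out, err := exec.Command("sudo", "runc", "list", "-f", "json").Output()
		if err != nil {
			fmt.Println(err)
			return
		}
		var containers []runcContainer
		if err := json.Unmarshal(out, &containers); err != nil {
			fmt.Println(err)
			return
		}
		for _, c := range containers {
			// e.g. every container above reports status "stopped" here
			fmt.Println(c.ID, c.Status, c.Annotations["io.kubernetes.container.name"])
		}
	}
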
	I0916 10:47:59.955530 1401996 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced/userdata","rootfs":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","created":"2024-09-16T10:46:54.451546712Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"d1900d79","io.kubernetes.container.name":"kube-controller-manager","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"d1900d79\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.t
erminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.388751196Z","io.kubernetes.cri-o.Image":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri-o.ImageRef":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-controller-manager\",\"io.kubernetes.pod.name\":\"kube-controller-manager-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"bcfd044776fa163108ac9ce9912dd1b1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-controller-manager-functional-919910_bcfd044776fa163108ac9ce9912dd1b1/kube-controller-manager/0.log","io.kub
ernetes.cri-o.Metadata":"{\"name\":\"kube-controller-manager\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/9d25114c4e5423d24252b77dba36894f36016e9218116badbb9dbec6638e1801/merged","io.kubernetes.cri-o.Name":"k8s_kube-controller-manager_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae","io.kubernetes.cri-o.SandboxName":"k8s_kube-controller-manager-functional-919910_kube-system_bcfd044776fa163108ac9ce9912dd1b1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readon
ly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/containers/kube-controller-manager/2c567ce7\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/bcfd044776fa163108ac9ce9912dd1b1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/controller-manager.conf\",\"host_path\":\"/etc/kubernetes/controller-manager.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/
certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"host_path\":\"/usr/libexec/kubernetes/kubelet-plugins/volume/exec\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-controller-manager-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802316924Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-conta
iners/3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9/userdata","rootfs":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","created":"2024-09-16T10:47:07.149526142Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"e80daca3","io.kubernetes.container.name":"kindnet-cni","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"e80daca3\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9","io.kubernetes.cri-o.ContainerType":"container","io.k
ubernetes.cri-o.Created":"2024-09-16T10:47:07.08917364Z","io.kubernetes.cri-o.Image":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.ImageName":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri-o.ImageRef":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kindnet-cni\",\"io.kubernetes.pod.name\":\"kindnet-nb5xl\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"1282e172-7d16-4f24-9f7d-33da705832a9\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kindnet-nb5xl_1282e172-7d16-4f24-9f7d-33da705832a9/kindnet-cni/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kindnet-cni\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/33ae8f381ec56dbef8842ba8809fd9b503de8020ea42b8f8c194145d6dbea159/merged","io.kubernetes.cri-o.Name":"k8s_kindnet-cni_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernet
es.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f","io.kubernetes.cri-o.SandboxName":"k8s_kindnet-nb5xl_kube-system_1282e172-7d16-4f24-9f7d-33da705832a9_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/
dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/containers/kindnet-cni/4675b3f6\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/cni/net.d\",\"host_path\":\"/etc/cni/net.d\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/1282e172-7d16-4f24-9f7d-33da705832a9/volumes/kubernetes.io~projected/kube-api-access-bxwpg\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kindnet-nb5xl","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"1282e172-7d16-4f24-9f7d-33da705832a9","kubernetes.io/config.seen":"2024-09-16T10:47:06.101213303Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf","pid":0,"status":"s
topped","bundle":"/run/containers/storage/overlay-containers/584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf/userdata","rootfs":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","created":"2024-09-16T10:47:48.333720036Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"6c6bf961","io.kubernetes.container.name":"storage-provisioner","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"6c6bf961\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776
bd065faf","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.29216211Z","io.kubernetes.cri-o.Image":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.ImageName":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri-o.ImageRef":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"storage-provisioner\",\"io.kubernetes.pod.name\":\"storage-provisioner\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2eb6523f-f61a-4c33-8e91-0bbbb874554b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_storage-provisioner_2eb6523f-f61a-4c33-8e91-0bbbb874554b/storage-provisioner/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"storage-provisioner\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/0bedb04df165ede72307d852c687b05c750fe9223f4fb8c1d3776f63a28900f8/merged","io.kubernetes.cri-o.Name"
:"k8s_storage-provisioner_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a","io.kubernetes.cri-o.SandboxName":"k8s_storage-provisioner_kube-system_2eb6523f-f61a-4c33-8e91-0bbbb874554b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/tmp\",\"host_path\":\"/tmp\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/va
r/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/containers/storage-provisioner/a9710de8\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2eb6523f-f61a-4c33-8e91-0bbbb874554b/volumes/kubernetes.io~projected/kube-api-access-kn9qz\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"storage-provisioner","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2eb6523f-f61a-4c33-8e91-0bbbb874554b","kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"v1\",\"kind\":\"Pod\",\"metadata\":{\"annotations\":{},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"Reconcile\",\"integration-test\":\"storage-provisioner\"},\"name\":\"storage-provisioner\",\"namespace\":\"kube-system\"},\"spec\":{\"containers\":[{\"command\":[\"/storage-provisioner\"],\"image\":\"gcr.io/k8s-minikube
/storage-provisioner:v5\",\"imagePullPolicy\":\"IfNotPresent\",\"name\":\"storage-provisioner\",\"volumeMounts\":[{\"mountPath\":\"/tmp\",\"name\":\"tmp\"}]}],\"hostNetwork\":true,\"serviceAccountName\":\"storage-provisioner\",\"volumes\":[{\"hostPath\":{\"path\":\"/tmp\",\"type\":\"Directory\"},\"name\":\"tmp\"}]}}\n","kubernetes.io/config.seen":"2024-09-16T10:47:47.935314547Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49/userdata","rootfs":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","created":"2024-09-16T10:46:54.47745643Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"12faacf7","io.kubernetes.container.name":"kube-scheduler","io.kubernetes.container.restartCount":
"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"12faacf7\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.420996546Z","io.kubernetes.cri-o.Image":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri-o.ImageRef":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-scheduler\",\"io.kub
ernetes.pod.name\":\"kube-scheduler-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"60f2072c6865fb71ef7928175ceb3dad\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-scheduler-functional-919910_60f2072c6865fb71ef7928175ceb3dad/kube-scheduler/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-scheduler\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/cab360ca19399b430c9ac3118df3b10c96a9ca4f93f89484957af5838b7a7903/merged","io.kubernetes.cri-o.Name":"k8s_kube-scheduler_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3dad_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"00455a328acb5f6e9ea466104b770d8dfdb288b1e2998c0222c8b30b804b19cb","io.kubernetes.cri-o.SandboxName":"k8s_kube-scheduler-functional-919910_kube-system_60f2072c6865fb71ef7928175ceb3
dad_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/60f2072c6865fb71ef7928175ceb3dad/containers/kube-scheduler/e278c329\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/kubernetes/scheduler.conf\",\"host_path\":\"/etc/kubernetes/scheduler.conf\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-scheduler-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.hash":"60f2072c6865fb71ef79281
75ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:46:53.802318072Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75/userdata","rootfs":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","created":"2024-09-16T10:46:54.415982086Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"7df2713b","io.kubernetes.container.name":"kube-apiserver","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"7df2713b\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessageP
ath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.357706151Z","io.kubernetes.cri-o.Image":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri-o.ImageRef":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-apiserver\",\"io.kubernetes.pod.name\":\"kube-apiserver-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3d8a6ba31c18f33c5660170029e5cde1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-apiserver-functional-919910_3d8a6ba31c18f33c5660170029e5cde1/kube-apiserver/0
.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-apiserver\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/a08c3c563565903a078ef017305fa2825386d400183a5328437db2f752f9752f/merged","io.kubernetes.cri-o.Name":"k8s_kube-apiserver_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1","io.kubernetes.cri-o.SandboxName":"k8s_kube-apiserver-functional-919910_kube-system_3d8a6ba31c18f33c5660170029e5cde1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1
/containers/kube-apiserver/e14b8c41\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ca-certificates\",\"host_path\":\"/etc/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/share/ca-certificates\",\"host_path\":\"/usr/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/ssl/certs\",\"host_path\":\"/etc/ssl/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs\",\"host_path\":\"/var/lib/minikube/certs\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/usr/local/share/ca-certificates\",\"host_path\":\"/usr/local/share/ca-certificates\",\"readonly\":true,\"propagation\":0,\"
selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-apiserver-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3d8a6ba31c18f33c5660170029e5cde1","kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802315340Z","kubernetes.io/config.source":"file"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e/userdata","rootfs":"/var/lib/containers/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","created":"2024-09-16T10:47:48.353491936Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"2a3a204d","io.kubernetes.cont
ainer.name":"coredns","io.kubernetes.container.ports":"[{\"name\":\"dns\",\"containerPort\":53,\"protocol\":\"UDP\"},{\"name\":\"dns-tcp\",\"containerPort\":53,\"protocol\":\"TCP\"},{\"name\":\"metrics\",\"containerPort\":9153,\"protocol\":\"TCP\"}]","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"2a3a204d\",\"io.kubernetes.container.ports\":\"[{\\\"name\\\":\\\"dns\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"UDP\\\"},{\\\"name\\\":\\\"dns-tcp\\\",\\\"containerPort\\\":53,\\\"protocol\\\":\\\"TCP\\\"},{\\\"name\\\":\\\"metrics\\\",\\\"containerPort\\\":9153,\\\"protocol\\\":\\\"TCP\\\"}]\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeri
od\":\"30\"}","io.kubernetes.cri-o.ContainerID":"89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:48.313875121Z","io.kubernetes.cri-o.IP.0":"10.244.0.2","io.kubernetes.cri-o.Image":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.ImageName":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri-o.ImageRef":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"coredns\",\"io.kubernetes.pod.name\":\"coredns-7c65d6cfc9-qzn8c\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"ada36fb7-8486-4afc-9bef-04ab2e65fc7b\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_coredns-7c65d6cfc9-qzn8c_ada36fb7-8486-4afc-9bef-04ab2e65fc7b/coredns/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"coredns\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/container
s/storage/overlay/d6960bf7e5d7c4f7e17e6f57c4b342d7214a64f55a8e636cea36824eb532352c/merged","io.kubernetes.cri-o.Name":"k8s_coredns_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b","io.kubernetes.cri-o.SandboxName":"k8s_coredns-7c65d6cfc9-qzn8c_kube-system_ada36fb7-8486-4afc-9bef-04ab2e65fc7b_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/coredns\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~configmap/config-volume\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/h
osts\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/containers/coredns/4fbb99bf\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/ada36fb7-8486-4afc-9bef-04ab2e65fc7b/volumes/kubernetes.io~projected/kube-api-access-lfgrj\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"coredns-7c65d6cfc9-qzn8c","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","kubernetes.io/config.seen":"2024-09-16T10:47:47.928368173Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"9fdab793eb970a5f01845e2aeaf138984
6fd7113bbdedbb122c9c796017271d5","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5/userdata","rootfs":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","created":"2024-09-16T10:47:07.30034468Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"159dcc59","io.kubernetes.container.name":"kube-proxy","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"159dcc59\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"9fdab793eb970a
5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:47:07.113935925Z","io.kubernetes.cri-o.Image":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.ImageName":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri-o.ImageRef":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"kube-proxy\",\"io.kubernetes.pod.name\":\"kube-proxy-nvpzv\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"2e1bfc3e-dea3-4511-a154-e367e28b0898\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_kube-proxy-nvpzv_2e1bfc3e-dea3-4511-a154-e367e28b0898/kube-proxy/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"kube-proxy\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/containers/storage/overlay/ef08c3fd03152a8aa0ede7087e0739682cabfd602e73253e1c7bf91e655d2b30/merged","io.kubernetes.cri-o.Nam
e":"k8s_kube-proxy_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49","io.kubernetes.cri-o.SandboxName":"k8s_kube-proxy-nvpzv_kube-system_2e1bfc3e-dea3-4511-a154-e367e28b0898_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/run/xtables.lock\",\"host_path\":\"/run/xtables.lock\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/lib/modules\",\"host_path\":\"/lib/modules\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0
898/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/containers/kube-proxy/8c6823e0\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/kube-proxy\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~configmap/kube-proxy\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"host_path\":\"/var/lib/kubelet/pods/2e1bfc3e-dea3-4511-a154-e367e28b0898/volumes/kubernetes.io~projected/kube-api-access-4b6t8\",\"readonly\":true,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"kube-proxy-nvpzv","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","kubernetes.io/config.seen":"2024-0
9-16T10:47:06.101265018Z","kubernetes.io/config.source":"api"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","pid":0,"status":"stopped","bundle":"/run/containers/storage/overlay-containers/b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093/userdata","rootfs":"/var/lib/containers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","created":"2024-09-16T10:46:54.468809185Z","annotations":{"io.container.manager":"cri-o","io.kubernetes.container.hash":"cdf7d3fa","io.kubernetes.container.name":"etcd","io.kubernetes.container.restartCount":"0","io.kubernetes.container.terminationMessagePath":"/dev/termination-log","io.kubernetes.container.terminationMessagePolicy":"File","io.kubernetes.cri-o.Annotations":"{\"io.kubernetes.container.hash\":\"cdf7d3fa\",\"io.kubernetes.container.restartCount\":\"0\",\"io.kubernetes.container.terminationMessagePath\":\"/dev/termination-log\",\"io.kubernetes.container.
terminationMessagePolicy\":\"File\",\"io.kubernetes.pod.terminationGracePeriod\":\"30\"}","io.kubernetes.cri-o.ContainerID":"b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093","io.kubernetes.cri-o.ContainerType":"container","io.kubernetes.cri-o.Created":"2024-09-16T10:46:54.371518695Z","io.kubernetes.cri-o.Image":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.ImageName":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri-o.ImageRef":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","io.kubernetes.cri-o.Labels":"{\"io.kubernetes.container.name\":\"etcd\",\"io.kubernetes.pod.name\":\"etcd-functional-919910\",\"io.kubernetes.pod.namespace\":\"kube-system\",\"io.kubernetes.pod.uid\":\"3e910b182a705a484fdc6733177892d1\"}","io.kubernetes.cri-o.LogPath":"/var/log/pods/kube-system_etcd-functional-919910_3e910b182a705a484fdc6733177892d1/etcd/0.log","io.kubernetes.cri-o.Metadata":"{\"name\":\"etcd\"}","io.kubernetes.cri-o.MountPoint":"/var/lib/cont
ainers/storage/overlay/19326c5ffd0f01a6b991a3440fcc45df2485a34cf81cae741c2cf03d1a3151c6/merged","io.kubernetes.cri-o.Name":"k8s_etcd_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.ResolvPath":"/run/containers/storage/overlay-containers/46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee/userdata/resolv.conf","io.kubernetes.cri-o.SandboxID":"46079181d292566bf3368ecdd33ca88588287c49a4c55e1ea9b8c742914a5eee","io.kubernetes.cri-o.SandboxName":"k8s_etcd-functional-919910_kube-system_3e910b182a705a484fdc6733177892d1_0","io.kubernetes.cri-o.SeccompProfilePath":"","io.kubernetes.cri-o.Stdin":"false","io.kubernetes.cri-o.StdinOnce":"false","io.kubernetes.cri-o.TTY":"false","io.kubernetes.cri-o.Volumes":"[{\"container_path\":\"/etc/hosts\",\"host_path\":\"/var/lib/kubelet/pods/3e910b182a705a484fdc6733177892d1/etc-hosts\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/dev/termination-log\",\"host_path\":\"/var/lib/kubelet/p
ods/3e910b182a705a484fdc6733177892d1/containers/etcd/840357dc\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/etcd\",\"host_path\":\"/var/lib/minikube/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false},{\"container_path\":\"/var/lib/minikube/certs/etcd\",\"host_path\":\"/var/lib/minikube/certs/etcd\",\"readonly\":false,\"propagation\":0,\"selinux_relabel\":false}]","io.kubernetes.pod.name":"etcd-functional-919910","io.kubernetes.pod.namespace":"kube-system","io.kubernetes.pod.terminationGracePeriod":"30","io.kubernetes.pod.uid":"3e910b182a705a484fdc6733177892d1","kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"owner":"root"}]
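
The JSON dump above is cri-o's runc-style container list: one object per container, with pod metadata, mounts, and kubectl annotations inlined. For the pause/unpause check that follows, only the top-level "id" and "status" fields matter. A minimal decoding sketch in Go (type and variable names are illustrative, not minikube's own):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // listedContainer keeps just the two top-level fields the state check needs.
    type listedContainer struct {
        ID     string `json:"id"`
        Status string `json:"status"`
    }

    func main() {
        // stands in for the raw list output shown above
        raw := []byte(`[{"id":"b88a79882d73e8e5","status":"stopped"}]`)
        var containers []listedContainer
        if err := json.Unmarshal(raw, &containers); err != nil {
            panic(err)
        }
        for _, c := range containers {
            fmt.Println(c.ID, c.Status)
        }
    }
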
	I0916 10:47:59.956282 1401996 cri.go:126] list returned 8 containers
	I0916 10:47:59.956312 1401996 cri.go:129] container: {ID:19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced Status:stopped}
	I0916 10:47:59.956327 1401996 cri.go:135] skipping {19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956337 1401996 cri.go:129] container: {ID:3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9 Status:stopped}
	I0916 10:47:59.956343 1401996 cri.go:135] skipping {3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956348 1401996 cri.go:129] container: {ID:584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf Status:stopped}
	I0916 10:47:59.956354 1401996 cri.go:135] skipping {584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956363 1401996 cri.go:129] container: {ID:6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49 Status:stopped}
	I0916 10:47:59.956368 1401996 cri.go:135] skipping {6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956374 1401996 cri.go:129] container: {ID:790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75 Status:stopped}
	I0916 10:47:59.956382 1401996 cri.go:135] skipping {790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956397 1401996 cri.go:129] container: {ID:89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e Status:stopped}
	I0916 10:47:59.956403 1401996 cri.go:135] skipping {89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956409 1401996 cri.go:129] container: {ID:9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5 Status:stopped}
	I0916 10:47:59.956414 1401996 cri.go:135] skipping {9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5 stopped}: state = "stopped", want "paused"
	I0916 10:47:59.956420 1401996 cri.go:129] container: {ID:b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093 Status:stopped}
	I0916 10:47:59.956425 1401996 cri.go:135] skipping {b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093 stopped}: state = "stopped", want "paused"
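
Those cri.go:135 lines are a filter: this code path wants containers in state "paused" (it is deciding whether anything needs to be unpaused), and every container in the list is "stopped", so all eight are skipped. A sketch of the filter, reusing the listedContainer type from the sketch above:

    // keepInState returns the containers whose status matches want, mirroring
    // the `skipping {... stopped}: state = "stopped", want "paused"` lines.
    func keepInState(containers []listedContainer, want string) []listedContainer {
        var kept []listedContainer
        for _, c := range containers {
            if c.Status != want {
                continue // logged as "skipping" above
            }
            kept = append(kept, c)
        }
        return kept
    }
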
	I0916 10:47:59.956500 1401996 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:47:59.965165 1401996 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 10:47:59.965185 1401996 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 10:47:59.965192 1401996 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 10:47:59.965196 1401996 command_runner.go:130] > member
	I0916 10:47:59.966329 1401996 kubeadm.go:408] found existing configuration files, will attempt cluster restart
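
The `sudo ls` probe just before this is how minikube chooses between a fresh `kubeadm init` and a restart: if the kubelet config, the kubeadm flags file, and a populated etcd data directory all exist, cluster state survived and a restart is attempted. A minimal sketch of the same decision (the runCmd helper standing in for ssh_runner is hypothetical):

    // hasExistingCluster reports whether the control-plane state files survived;
    // ls exits non-zero if any listed path is missing.
    func hasExistingCluster(runCmd func(cmd string) error) bool {
        return runCmd("sudo ls /var/lib/kubelet/kubeadm-flags.env "+
            "/var/lib/kubelet/config.yaml /var/lib/minikube/etcd") == nil
    }
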
	I0916 10:47:59.966344 1401996 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:47:59.966415 1401996 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:47:59.975219 1401996 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:47:59.975763 1401996 kubeconfig.go:125] found "functional-919910" server: "https://192.168.49.2:8441"
	I0916 10:47:59.976267 1401996 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:59.976523 1401996 kapi.go:59] client config for functional-919910: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
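
The kapi.go client config dumped above is a client-go rest.Config pointed at the restarted apiserver and authenticated with the profile's client certificate. A minimal sketch of building an equivalent clientset from those same paths (error handling trimmed):

    import (
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func newProfileClient() (*kubernetes.Clientset, error) {
        cfg := &rest.Config{
            Host: "https://192.168.49.2:8441",
            TLSClientConfig: rest.TLSClientConfig{
                CertFile: "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt",
                KeyFile:  "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key",
                CAFile:   "/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt",
            },
        }
        return kubernetes.NewForConfig(cfg)
    }
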
	I0916 10:47:59.977304 1401996 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:47:59.977398 1401996 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:47:59.986232 1401996 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:47:59.986266 1401996 kubeadm.go:597] duration metric: took 19.915756ms to restartPrimaryControlPlane
	I0916 10:47:59.986276 1401996 kubeadm.go:394] duration metric: took 92.240124ms to StartCluster
	I0916 10:47:59.986317 1401996 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.986408 1401996 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:47:59.987095 1401996 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:47:59.987344 1401996 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:47:59.987701 1401996 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:47:59.987810 1401996 addons.go:69] Setting storage-provisioner=true in profile "functional-919910"
	I0916 10:47:59.987840 1401996 addons.go:234] Setting addon storage-provisioner=true in "functional-919910"
	W0916 10:47:59.987862 1401996 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:47:59.987901 1401996 host.go:66] Checking if "functional-919910" exists ...
	I0916 10:47:59.987966 1401996 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:47:59.988024 1401996 addons.go:69] Setting default-storageclass=true in profile "functional-919910"
	I0916 10:47:59.988041 1401996 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-919910"
	I0916 10:47:59.988320 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:47:59.988449 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
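
Both cli_runner calls read the node container's state through docker's Go-template --format flag rather than parsing the full inspect JSON. The equivalent shell-out from Go (a sketch; imports os/exec and strings, error handling trimmed):

    out, err := exec.Command("docker", "container", "inspect",
        "functional-919910", "--format", "{{.State.Status}}").Output()
    if err == nil {
        status := strings.TrimSpace(string(out)) // e.g. "running"
        _ = status
    }
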
	I0916 10:47:59.993883 1401996 out.go:177] * Verifying Kubernetes components...
	I0916 10:47:59.996540 1401996 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:00.012508 1401996 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:48:00.012872 1401996 kapi.go:59] client config for functional-919910: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:00.013941 1401996 addons.go:234] Setting addon default-storageclass=true in "functional-919910"
	W0916 10:48:00.013978 1401996 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:00.014011 1401996 host.go:66] Checking if "functional-919910" exists ...
	I0916 10:48:00.015576 1401996 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:48:00.028749 1401996 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:00.032397 1401996 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.032435 1401996 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:00.032514 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:00.057250 1401996 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.057279 1401996 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:00.057353 1401996 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:00.088811 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:00.142080 1401996 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
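
The "scp memory -->" lines show the addon manifests being streamed from memory over the just-opened SSH connections rather than copied from files on disk. A rough sketch of that idea using golang.org/x/crypto/ssh (session setup omitted; sudo tee is one way to land the file as root, not necessarily what ssh_runner actually does):

    // copyToNode streams manifest bytes to a path on the node over SSH.
    func copyToNode(session *ssh.Session, path string, data []byte) error {
        session.Stdin = bytes.NewReader(data)
        return session.Run("sudo tee " + path + " >/dev/null")
    }
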
	I0916 10:48:00.384195 1401996 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:00.384326 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.426788 1401996 node_ready.go:35] waiting up to 6m0s for node "functional-919910" to be "Ready" ...
	I0916 10:48:00.427006 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:00.427037 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:00.427063 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:00.427084 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:00.428469 1401996 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 10:48:00.428543 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:00.433927 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.534904 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:00.535009 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.535069 1401996 retry.go:31] will retry after 308.565003ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.575462 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:00.575589 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.575652 1401996 retry.go:31] will retry after 310.710217ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:00.843924 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:00.887536 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:00.927006 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:00.927080 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:00.927116 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:00.927152 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:00.927470 1401996 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 10:48:00.927519 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:01.062205 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:01.062298 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.062334 1401996 retry.go:31] will retry after 200.404538ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.111099 1401996 command_runner.go:130] ! error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	W0916 10:48:01.111213 1401996 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.111266 1401996 retry.go:31] will retry after 431.025884ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8441/openapi/v2?timeout=32s": dial tcp [::1]:8441: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0916 10:48:01.263423 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:01.428245 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:01.428324 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:01.428359 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:01.428379 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:01.542998 1401996 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:05.268811 1401996 round_trippers.go:574] Response Status: 200 OK in 3840 milliseconds
	I0916 10:48:05.268845 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.268865 1401996 round_trippers.go:580]     Audit-Id: b20755fb-7b68-4485-a8ac-3c8c03c63fcc
	I0916 10:48:05.268869 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.268873 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.268877 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 10:48:05.268880 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 10:48:05.268883 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.290959 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.291832 1401996 node_ready.go:49] node "functional-919910" has status "Ready":"True"
	I0916 10:48:05.291863 1401996 node_ready.go:38] duration metric: took 4.864992104s for node "functional-919910" to be "Ready" ...
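
node_ready simply polls GET /api/v1/nodes/functional-919910 until the node reports Ready: the attempts at 10:48:00 get an empty response status (the apiserver socket is not accepting yet), and the request at 10:48:05 finally returns 200 with "Ready":"True", 4.86s in. A minimal client-go sketch of the same loop (clientset as built earlier; the 500ms poll interval is an assumption):

    import (
        "context"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // waitNodeReady polls the node object until its Ready condition is True.
    func waitNodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) error {
        for {
            node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
            if err == nil {
                for _, cond := range node.Status.Conditions {
                    if cond.Type == corev1.NodeReady && cond.Status == corev1.ConditionTrue {
                        return nil
                    }
                }
            }
            select {
            case <-ctx.Done():
                return ctx.Err()
            case <-time.After(500 * time.Millisecond):
            }
        }
    }
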
	I0916 10:48:05.291873 1401996 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:05.291919 1401996 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:48:05.291929 1401996 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:48:05.291997 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.292001 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.292009 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.292013 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.403335 1401996 round_trippers.go:574] Response Status: 200 OK in 111 milliseconds
	I0916 10:48:05.403355 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.403364 1401996 round_trippers.go:580]     Audit-Id: 7d422454-4258-4b85-a1cc-37a8b98d571f
	I0916 10:48:05.403368 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.403371 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.403374 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 10:48:05.403376 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 10:48:05.403379 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.405776 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"427"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"417","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59464 chars]
	I0916 10:48:05.410422 1401996 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.410574 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-qzn8c
	I0916 10:48:05.410601 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.410640 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.410663 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.457641 1401996 round_trippers.go:574] Response Status: 200 OK in 46 milliseconds
	I0916 10:48:05.457719 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.457741 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.457760 1401996 round_trippers.go:580]     Audit-Id: 16dfbd36-f6d0-42fb-b5ed-9209469f4674
	I0916 10:48:05.457792 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.457813 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.457833 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.457850 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.462154 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"417","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6814 chars]
	I0916 10:48:05.462872 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.462919 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.462944 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.462963 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.507563 1401996 round_trippers.go:574] Response Status: 200 OK in 44 milliseconds
	I0916 10:48:05.507637 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.507659 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.507679 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.507717 1401996 round_trippers.go:580]     Audit-Id: 2ac7f2e0-df41-4a25-a5a6-013d4b6ff9b5
	I0916 10:48:05.507739 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.507758 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.507778 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.508058 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.508540 1401996 pod_ready.go:93] pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.508585 1401996 pod_ready.go:82] duration metric: took 98.105074ms for pod "coredns-7c65d6cfc9-qzn8c" in "kube-system" namespace to be "Ready" ...
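
pod_ready repeats the pattern per system-critical pod: fetch the pod, check its PodReady condition, then re-fetch the node to confirm it is still Ready. The condition check itself reduces to a small helper (sketch, same client-go types as above):

    // isPodReady reports whether a pod's PodReady condition is True:
    // the check behind the `has status "Ready":"True"` lines.
    func isPodReady(pod *corev1.Pod) bool {
        for _, cond := range pod.Status.Conditions {
            if cond.Type == corev1.PodReady {
                return cond.Status == corev1.ConditionTrue
            }
        }
        return false
    }
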
	I0916 10:48:05.508618 1401996 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.508752 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910
	I0916 10:48:05.508779 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.508800 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.508835 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.523151 1401996 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:48:05.523176 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.523186 1401996 round_trippers.go:580]     Audit-Id: 6019da01-d618-4959-b44c-39894e5dbd68
	I0916 10:48:05.523191 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.523197 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.523206 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.523210 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.523214 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.527083 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-functional-919910","namespace":"kube-system","uid":"73472289-b523-4c96-8d5d-33ea5c657902","resourceVersion":"385","creationTimestamp":"2024-09-16T10:47:00Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.mirror":"3e910b182a705a484fdc6733177892d1","kubernetes.io/config.seen":"2024-09-16T10:46:53.802310056Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:00Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-
client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/confi [truncated 6440 chars]
	I0916 10:48:05.527724 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.527771 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.527793 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.527814 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.554600 1401996 round_trippers.go:574] Response Status: 200 OK in 26 milliseconds
	I0916 10:48:05.554677 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.554700 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.554721 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.554752 1401996 round_trippers.go:580]     Audit-Id: 64260f4c-bc78-4580-ae77-d48120f6be4a
	I0916 10:48:05.554774 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.554791 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.554809 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.561247 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.561811 1401996 pod_ready.go:93] pod "etcd-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.561852 1401996 pod_ready.go:82] duration metric: took 53.213628ms for pod "etcd-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.561889 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.562011 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910
	I0916 10:48:05.562035 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.562059 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.562093 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.574539 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.574615 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.574645 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.574693 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.574732 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.574750 1401996 round_trippers.go:580]     Audit-Id: 9a7034d0-d767-43fe-a880-66e341250069
	I0916 10:48:05.574796 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.574816 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.575573 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-functional-919910","namespace":"kube-system","uid":"82da7bbe-1484-402c-b1a5-7165f1938703","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.mirror":"3d8a6ba31c18f33c5660170029e5cde1","kubernetes.io/config.seen":"2024-09-16T10:47:01.310178039Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.ku
bernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes [truncated 8516 chars]
	I0916 10:48:05.576295 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.576351 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.576390 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.576409 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.597433 1401996 round_trippers.go:574] Response Status: 200 OK in 20 milliseconds
	I0916 10:48:05.597508 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.597531 1401996 round_trippers.go:580]     Audit-Id: 6589c107-019c-408e-845e-a53b672e2cc8
	I0916 10:48:05.597551 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.597583 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.597604 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.597623 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.597641 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.598277 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.598798 1401996 pod_ready.go:93] pod "kube-apiserver-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.598847 1401996 pod_ready.go:82] duration metric: took 36.937901ms for pod "kube-apiserver-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.598873 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.598980 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910
	I0916 10:48:05.599012 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.599033 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.599051 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.602884 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:05.602953 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.602974 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.602992 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.603025 1401996 round_trippers.go:580]     Audit-Id: 58c20f1e-d8c3-47e5-85ef-28bea0245620
	I0916 10:48:05.603047 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.603064 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.603082 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.610241 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-functional-919910","namespace":"kube-system","uid":"483b3e2c-288a-41e1-a29b-33a95b5b536a","resourceVersion":"389","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.mirror":"bcfd044776fa163108ac9ce9912dd1b1","kubernetes.io/config.seen":"2024-09-16T10:47:01.310179278Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes
.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{"." [truncated 8091 chars]
	I0916 10:48:05.610955 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.611001 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.611024 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.611040 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.623206 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.623278 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.623302 1401996 round_trippers.go:580]     Audit-Id: eba365c8-7916-4bed-afc6-3ac57c8778c6
	I0916 10:48:05.623318 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.623351 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.623372 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.623387 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.623404 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.625918 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.626484 1401996 pod_ready.go:93] pod "kube-controller-manager-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.626528 1401996 pod_ready.go:82] duration metric: took 27.634829ms for pod "kube-controller-manager-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.626554 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-nvpzv" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.626649 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv
	I0916 10:48:05.626682 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.626704 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.626724 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.639523 1401996 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:48:05.639598 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.639619 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.639635 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.639654 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.639687 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.639703 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.639721 1401996 round_trippers.go:580]     Audit-Id: 2b6ebbcf-d263-4d6c-b097-8c0ab6e63f49
	I0916 10:48:05.642765 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-nvpzv","generateName":"kube-proxy-","namespace":"kube-system","uid":"2e1bfc3e-dea3-4511-a154-e367e28b0898","resourceVersion":"357","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"e471ede7-5b70-4fcb-8bb8-8ab058b1f83f","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"e471ede7-5b70-4fcb-8bb8-8ab058b1f83f\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6172 chars]
	I0916 10:48:05.643413 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.643461 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.643484 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.643505 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.652916 1401996 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:48:05.652989 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.653011 1401996 round_trippers.go:580]     Audit-Id: 39e2f15b-9376-434d-a0fd-05e78846f93a
	I0916 10:48:05.653031 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.653124 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.653146 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.653163 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.653179 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.654529 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:05.655026 1401996 pod_ready.go:93] pod "kube-proxy-nvpzv" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:05.655070 1401996 pod_ready.go:82] duration metric: took 28.496567ms for pod "kube-proxy-nvpzv" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.655097 1401996 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:05.692423 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:05.692494 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.692526 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.692546 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.699783 1401996 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:48:05.699856 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.699889 1401996 round_trippers.go:580]     Audit-Id: 6a583f7c-1627-40c3-9614-00b54d361799
	I0916 10:48:05.699908 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.699937 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.699957 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.699976 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.699993 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.709127 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"430","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5337 chars]
	I0916 10:48:05.892560 1401996 request.go:632] Waited for 182.854067ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.892662 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:05.892704 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:05.892736 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.892756 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.921649 1401996 round_trippers.go:574] Response Status: 200 OK in 28 milliseconds
	I0916 10:48:05.921736 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:05.921759 1401996 round_trippers.go:580]     Audit-Id: a55d5629-0c4b-4e58-b9f3-f01a607ebccc
	I0916 10:48:05.921818 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.921840 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.921858 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:05.921885 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:05.921908 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.922118 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:06.155797 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:06.155894 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.155918 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.155940 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.178184 1401996 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:48:06.178261 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.178285 1401996 round_trippers.go:580]     Audit-Id: 651630df-1444-437f-9e5e-0bd74d71db58
	I0916 10:48:06.178304 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.178336 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.178359 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.178379 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.178396 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.195434 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:06.292928 1401996 request.go:632] Waited for 96.929572ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.293057 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.293082 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.293117 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.293141 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.294792 1401996 command_runner.go:130] > serviceaccount/storage-provisioner unchanged
	I0916 10:48:06.294862 1401996 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner unchanged
	I0916 10:48:06.294886 1401996 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:06.294907 1401996 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:06.294941 1401996 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath unchanged
	I0916 10:48:06.294965 1401996 command_runner.go:130] > pod/storage-provisioner configured
	I0916 10:48:06.295030 1401996 command_runner.go:130] > storageclass.storage.k8s.io/standard unchanged
	I0916 10:48:06.295062 1401996 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (4.752042541s)
	I0916 10:48:06.295182 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses
	I0916 10:48:06.295188 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.295197 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.295201 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.295320 1401996 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.031500772s)
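[annotation] Both "Completed:" lines above are minikube's ssh_runner timing the addon manifests being applied inside the node with the bundled kubectl. A stand-in sketch using os/exec locally (minikube actually runs this over SSH inside the node):

    package main

    import (
    	"log"
    	"os/exec"
    )

    func main() {
    	// Re-create the addon apply step from the ssh_runner lines above.
    	cmd := exec.Command("sudo", "KUBECONFIG=/var/lib/minikube/kubeconfig",
    		"/var/lib/minikube/binaries/v1.31.1/kubectl",
    		"apply", "--force", "-f", "/etc/kubernetes/addons/storage-provisioner.yaml")
    	out, err := cmd.CombinedOutput()
    	if err != nil {
    		log.Fatalf("apply failed: %v\n%s", err, out)
    	}
    	log.Printf("%s", out)
    }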
	I0916 10:48:06.302450 1401996 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:48:06.302477 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.302486 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.302490 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.302493 1401996 round_trippers.go:580]     Audit-Id: a46c5f67-ef7e-4d00-b4b6-bf1b00251284
	I0916 10:48:06.302497 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.302500 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.302503 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.303381 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:06.306118 1401996 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:48:06.306194 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.306216 1401996 round_trippers.go:580]     Audit-Id: efdb759c-528f-47e1-a33c-909f4e747a5b
	I0916 10:48:06.306233 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.306264 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.306285 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.306302 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.306320 1401996 round_trippers.go:580]     Content-Length: 1273
	I0916 10:48:06.306349 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.306635 1401996 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"472"},"items":[{"metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 10:48:06.307239 1401996 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:06.307335 1401996 round_trippers.go:463] PUT https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:48:06.307361 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.307396 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.307417 1401996 round_trippers.go:473]     Content-Type: application/json
	I0916 10:48:06.307434 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.330130 1401996 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:48:06.330203 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.330223 1401996 round_trippers.go:580]     Content-Length: 1220
	I0916 10:48:06.330244 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.330275 1401996 round_trippers.go:580]     Audit-Id: c3519c1d-0b15-44be-ba85-74c1a4cd1612
	I0916 10:48:06.330296 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.330309 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.330326 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.330360 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.330643 1401996 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"db207f0e-3071-4d4c-96f2-ab3073d7e7e0","resourceVersion":"351","creationTimestamp":"2024-09-16T10:47:07Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:07Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
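[annotation] The GET followed by PUT on /apis/storage.k8s.io/v1/storageclasses/standard is kubectl-style client-side apply reconciling the "standard" class; its storageclass.kubernetes.io/is-default-class: "true" annotation is what marks it as the cluster default. An equivalent hedged client-go sketch:

    package main

    import (
    	"context"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	// GET then PUT, as in the log: fetch the class, set the default-class
    	// annotation, and update it in place.
    	ctx := context.Background()
    	sc, err := cs.StorageV1().StorageClasses().Get(ctx, "standard", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	if sc.Annotations == nil {
    		sc.Annotations = map[string]string{}
    	}
    	sc.Annotations["storageclass.kubernetes.io/is-default-class"] = "true"
    	if _, err := cs.StorageV1().StorageClasses().Update(ctx, sc, metav1.UpdateOptions{}); err != nil {
    		panic(err)
    	}
    }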
	I0916 10:48:06.335529 1401996 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:06.338267 1401996 addons.go:510] duration metric: took 6.350561855s for enable addons: enabled=[storage-provisioner default-storageclass]
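[annotation] Here the two addons were applied as part of start-up; for comparison, the same pair can presumably be toggled on an existing cluster with `minikube addons enable storage-provisioner` and `minikube addons enable default-storageclass`.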
	I0916 10:48:06.655344 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:06.655367 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.655377 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.655383 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.658864 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:06.658890 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.658899 1401996 round_trippers.go:580]     Audit-Id: 27b8243b-941e-466b-899a-27f9168ec15a
	I0916 10:48:06.658903 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.658908 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.658911 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.658914 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.658918 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.659036 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:06.692658 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:06.692701 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:06.692712 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:06.692717 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:06.695074 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:06.695101 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:06.695110 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:06.695115 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:06 GMT
	I0916 10:48:06.695118 1401996 round_trippers.go:580]     Audit-Id: de8f589d-df48-428d-a44c-3f5cf6ca5c27
	I0916 10:48:06.695151 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:06.695161 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:06.695165 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:06.695310 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.155348 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:07.155373 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.155382 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.155386 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.157729 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.157756 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.157764 1401996 round_trippers.go:580]     Audit-Id: e28e28ca-46f8-45a1-9102-49223c47f5ba
	I0916 10:48:07.157770 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.157773 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.157776 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.157782 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.157785 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.158536 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:07.159091 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:07.159146 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.159170 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.159190 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.161613 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.161634 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.161641 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.161645 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.161648 1401996 round_trippers.go:580]     Audit-Id: 918b0e6f-c4bd-4271-8e3c-5b4ce9d57e17
	I0916 10:48:07.161651 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.161654 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.161657 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.161769 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.655945 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:07.655970 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.655978 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.655983 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.658377 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.658407 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.658416 1401996 round_trippers.go:580]     Audit-Id: 76d8e195-1414-4d8c-ba2e-8fe0c471961a
	I0916 10:48:07.658430 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.658434 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.658437 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.658457 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.658467 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.658601 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:07.659104 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:07.659122 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:07.659130 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:07.659135 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:07.661280 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:07.661303 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:07.661313 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:07.661318 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:07.661323 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:07 GMT
	I0916 10:48:07.661326 1401996 round_trippers.go:580]     Audit-Id: 041d95a8-0989-4252-8fc5-0e02d4e5b989
	I0916 10:48:07.661329 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:07.661332 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:07.661648 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:07.662055 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
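[annotation] pod_ready.go:103 is the not-ready branch: kube-scheduler-functional-919910 still reports Ready=False, so the wait continues. The surrounding request timestamps (10:48:06.155, 06.655, 07.155, 07.655, ...) show the roughly 500ms retry cadence that repeats until the pod turns Ready or the 6m0s budget expires.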
	I0916 10:48:08.155546 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:08.155619 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.155643 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.155665 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.158082 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.158153 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.158175 1401996 round_trippers.go:580]     Audit-Id: 6edd8771-2339-4400-9108-b26a1ef4d0bc
	I0916 10:48:08.158193 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.158223 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.158245 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.158263 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.158280 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.158439 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:08.158935 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:08.158954 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.158963 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.158968 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.161016 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.161039 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.161048 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.161051 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.161054 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.161057 1401996 round_trippers.go:580]     Audit-Id: a126ddfb-3d41-4bb4-8107-d94f2aae2022
	I0916 10:48:08.161060 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.161063 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.161463 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:08.656176 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:08.656201 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.656217 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.656223 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.658586 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.658651 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.658684 1401996 round_trippers.go:580]     Audit-Id: e5465de8-0fe1-4264-aa6e-f64ee2497e82
	I0916 10:48:08.658703 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.658740 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.658751 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.658755 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.658759 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.658892 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:08.659399 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:08.659422 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:08.659431 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:08.659452 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:08.661658 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:08.661694 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:08.661702 1401996 round_trippers.go:580]     Audit-Id: e04c4294-98da-4d6d-9ca5-5da906e4a84a
	I0916 10:48:08.661706 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:08.661709 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:08.661712 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:08.661714 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:08.661717 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:08 GMT
	I0916 10:48:08.661890 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.156075 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:09.156102 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.156112 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.156117 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.158504 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.158531 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.158539 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.158545 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.158548 1401996 round_trippers.go:580]     Audit-Id: c771052a-b618-42a7-bfe5-d3cd94727405
	I0916 10:48:09.158551 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.158561 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.158564 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.158817 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:09.159316 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:09.159333 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.159351 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.159360 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.161355 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:09.161378 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.161387 1401996 round_trippers.go:580]     Audit-Id: 15e3a6f9-f715-4d69-b1cf-94a6b5aeb0cd
	I0916 10:48:09.161392 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.161396 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.161399 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.161402 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.161405 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.161846 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.656058 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:09.656083 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.656093 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.656097 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.658445 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.658510 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.658533 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.658552 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.658583 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.658605 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.658623 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.658641 1401996 round_trippers.go:580]     Audit-Id: 0e8ff4b5-169e-445c-adca-58d4f6417f04
	I0916 10:48:09.658850 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:09.659349 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:09.659366 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:09.659375 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:09.659379 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:09.661463 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:09.661484 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:09.661491 1401996 round_trippers.go:580]     Audit-Id: 7aabfc6e-075e-41c4-9548-b7c332b6ce4b
	I0916 10:48:09.661495 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:09.661522 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:09.661541 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:09.661544 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:09.661547 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:09 GMT
	I0916 10:48:09.662008 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:09.662441 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:10.155850 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:10.155879 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.155889 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.155894 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.158351 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.158379 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.158388 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.158393 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.158397 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.158401 1401996 round_trippers.go:580]     Audit-Id: a2849d4a-9951-48c6-9e96-b95eda339f33
	I0916 10:48:10.158403 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.158446 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.158702 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:10.159227 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:10.159256 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.159266 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.159270 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.161598 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.161623 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.161631 1401996 round_trippers.go:580]     Audit-Id: e958b3e9-cc1b-4750-90b1-360b921760fc
	I0916 10:48:10.161636 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.161640 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.161644 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.161647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.161650 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.162063 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:10.656157 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:10.656181 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.656191 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.656196 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.658610 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.658635 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.658642 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.658647 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.658650 1401996 round_trippers.go:580]     Audit-Id: abc213c3-888d-4372-ab96-c2b656dbe199
	I0916 10:48:10.658653 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.658656 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.658664 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.658943 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:10.659442 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:10.659460 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:10.659469 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:10.659474 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:10.661539 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:10.661563 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:10.661572 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:10.661577 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:10.661581 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:10.661585 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:10 GMT
	I0916 10:48:10.661589 1401996 round_trippers.go:580]     Audit-Id: d8aea646-e77c-4c0a-ae7d-567910c0c574
	I0916 10:48:10.661592 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:10.661726 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:11.155937 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:11.155963 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.155973 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.155976 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.158585 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.158614 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.158624 1401996 round_trippers.go:580]     Audit-Id: 4a531874-fb4e-42d1-90e9-20c9231f6650
	I0916 10:48:11.158628 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.158632 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.158647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.158659 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.158663 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.159187 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:11.159752 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:11.159763 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.159772 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.159776 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.162269 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.162298 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.162307 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.162314 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.162317 1401996 round_trippers.go:580]     Audit-Id: 78c813d3-a798-456c-b8e9-696c0b388b89
	I0916 10:48:11.162320 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.162323 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.162326 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.162769 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:11.655388 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:11.655416 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.655425 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.655430 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.657936 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.657962 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.657971 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.657975 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.657979 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.657982 1401996 round_trippers.go:580]     Audit-Id: d3c4da8c-ad9d-4d03-bd59-3bbb70c528ad
	I0916 10:48:11.657985 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.657987 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.658364 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:11.658832 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:11.658848 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:11.658857 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:11.658862 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:11.660899 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:11.660923 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:11.660931 1401996 round_trippers.go:580]     Audit-Id: bb877529-f789-4b09-871c-1f09b015efed
	I0916 10:48:11.660935 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:11.660939 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:11.660985 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:11.660994 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:11.660999 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:11 GMT
	I0916 10:48:11.661117 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:12.156271 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:12.156297 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.156308 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.156313 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.158870 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.158894 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.158902 1401996 round_trippers.go:580]     Audit-Id: 0c6eb45b-962b-4873-8662-1f1f93414add
	I0916 10:48:12.158909 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.158912 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.158916 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.158920 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.158923 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.159044 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:12.159576 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:12.159586 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.159594 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.159599 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.161846 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.161880 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.161889 1401996 round_trippers.go:580]     Audit-Id: ccde3b03-0793-4985-83c0-12846440be26
	I0916 10:48:12.161894 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.161898 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.161901 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.161904 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.161907 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.162157 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:12.162593 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:12.655305 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:12.655331 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.655340 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.655347 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.657713 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.657740 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.657749 1401996 round_trippers.go:580]     Audit-Id: 07b9928b-c0c0-4875-a188-8b628ddd8e44
	I0916 10:48:12.657754 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.657758 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.657761 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.657764 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.657768 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.658062 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:12.658565 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:12.658583 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:12.658592 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:12.658597 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:12.660663 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:12.660701 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:12.660709 1401996 round_trippers.go:580]     Audit-Id: c3a14552-eda6-49a2-a911-7d73fe1c4010
	I0916 10:48:12.660713 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:12.660717 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:12.660721 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:12.660726 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:12.660733 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:12 GMT
	I0916 10:48:12.661029 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:13.156227 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:13.156254 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.156264 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.156267 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.158747 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.158778 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.158788 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.158792 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.158794 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.158797 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.158801 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.158819 1401996 round_trippers.go:580]     Audit-Id: b4cab94a-002e-4281-b7ab-c555e3ca8d94
	I0916 10:48:13.159195 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:13.159676 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:13.159694 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.159704 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.159710 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.162019 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.162042 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.162050 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.162056 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.162059 1401996 round_trippers.go:580]     Audit-Id: f8176ddc-23f5-403b-8117-84fe9f4942e0
	I0916 10:48:13.162062 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.162066 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.162069 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.162500 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:13.656171 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:13.656199 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.656210 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.656214 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.658620 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.658646 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.658655 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.658660 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.658663 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.658667 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.658671 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.658674 1401996 round_trippers.go:580]     Audit-Id: de20cd4b-0c4a-4fa1-93e9-28c79590dfcd
	I0916 10:48:13.658944 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:13.659464 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:13.659482 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:13.659491 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:13.659496 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:13.661610 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:13.661629 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:13.661643 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:13.661647 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:13.661650 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:13.661653 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:13 GMT
	I0916 10:48:13.661656 1401996 round_trippers.go:580]     Audit-Id: 4d2de6de-a599-4279-845c-6b9d39f3a34c
	I0916 10:48:13.661658 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:13.661879 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.155396 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:14.155423 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.155433 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.155437 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.157856 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.157884 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.157893 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.157898 1401996 round_trippers.go:580]     Audit-Id: da2be167-9564-4b39-af14-1ec1a09a5827
	I0916 10:48:14.157902 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.157905 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.157907 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.157910 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.158046 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:14.158543 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:14.158559 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.158569 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.158573 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.160723 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.160748 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.160757 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.160760 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.160763 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.160767 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.160770 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.160773 1401996 round_trippers.go:580]     Audit-Id: 5b1bb651-cb08-4360-bfe0-6f608e2e2e8f
	I0916 10:48:14.160912 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.655393 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:14.655431 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.655442 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.655446 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.657773 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:14.657837 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.657853 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.657860 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.657865 1401996 round_trippers.go:580]     Audit-Id: c8928e9e-58df-4328-b135-75e55429ffa1
	I0916 10:48:14.657868 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.657871 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.657874 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.658279 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:14.658859 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:14.658876 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:14.658885 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:14.658904 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:14.660901 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:14.660921 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:14.660929 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:14 GMT
	I0916 10:48:14.660933 1401996 round_trippers.go:580]     Audit-Id: a480490d-9daf-4e76-ad83-5c9c102dc605
	I0916 10:48:14.660938 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:14.660941 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:14.660944 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:14.660946 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:14.661467 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:14.661867 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:15.155432 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:15.155461 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.155472 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.155477 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.158073 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.158152 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.158177 1401996 round_trippers.go:580]     Audit-Id: ac8fbba3-be63-4e5c-9c54-7edad14a5b66
	I0916 10:48:15.158199 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.158233 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.158266 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.158286 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.158305 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.158544 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:15.159054 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:15.159078 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.159087 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.159092 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.162067 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.162097 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.162106 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.162111 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.162122 1401996 round_trippers.go:580]     Audit-Id: 87aa48de-fda7-4a83-941c-0da0439419b3
	I0916 10:48:15.162125 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.162129 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.162131 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.162359 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:15.656149 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:15.656184 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.656194 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.656198 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.658899 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:15.659009 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.659024 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.659029 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.659032 1401996 round_trippers.go:580]     Audit-Id: 5d70e69f-0e76-4702-89cd-db609f376df7
	I0916 10:48:15.659034 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.659048 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.659054 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.659599 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:15.660068 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:15.660086 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:15.660094 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:15.660099 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:15.662075 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:15.662099 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:15.662108 1401996 round_trippers.go:580]     Audit-Id: 3d0c68c3-4517-4abd-a8ae-cea0682c0839
	I0916 10:48:15.662112 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:15.662115 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:15.662117 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:15.662120 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:15.662123 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:15 GMT
	I0916 10:48:15.662312 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.156023 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:16.156050 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.156060 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.156065 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.158425 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.158450 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.158459 1401996 round_trippers.go:580]     Audit-Id: 0f492724-674f-4757-bc75-460167467303
	I0916 10:48:16.158463 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.158466 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.158471 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.158474 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.158477 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.158714 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:16.159225 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:16.159244 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.159254 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.159265 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.161350 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.161368 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.161376 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.161380 1401996 round_trippers.go:580]     Audit-Id: 7ddf8664-34c6-4c2a-846d-5fb6a65279b5
	I0916 10:48:16.161384 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.161387 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.161390 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.161392 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.161537 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.656225 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:16.656252 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.656262 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.656267 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.658699 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.658728 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.658737 1401996 round_trippers.go:580]     Audit-Id: bdd5ae87-e869-44fa-aeeb-fcbe80b5fe07
	I0916 10:48:16.658742 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.658746 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.658749 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.658752 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.658756 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.659114 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:16.659649 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:16.659669 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:16.659678 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:16.659690 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:16.661913 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:16.661940 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:16.661946 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:16.661952 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:16 GMT
	I0916 10:48:16.661965 1401996 round_trippers.go:580]     Audit-Id: ecce3ff5-6416-42a6-b0c9-ccd63ec632b6
	I0916 10:48:16.661968 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:16.661972 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:16.661975 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:16.662121 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:16.662529 1401996 pod_ready.go:103] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:17.155393 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:17.155416 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.155426 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.155433 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.157890 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.157958 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.157982 1401996 round_trippers.go:580]     Audit-Id: 8e390082-9fa9-482d-8b04-db1bb2e1c058
	I0916 10:48:17.158002 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.158034 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.158054 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.158077 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.158080 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.158236 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:17.158763 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:17.158782 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.158791 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.158796 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.161280 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.161308 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.161316 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.161322 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.161326 1401996 round_trippers.go:580]     Audit-Id: 1ad80642-0309-4899-a58d-f663ef29271b
	I0916 10:48:17.161343 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.161347 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.161351 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.161629 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:17.655739 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:17.655767 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.655776 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.655782 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.658224 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.658251 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.658258 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.658263 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.658267 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.658270 1401996 round_trippers.go:580]     Audit-Id: 1eccb65e-89ac-43ec-ae5b-3075b1605045
	I0916 10:48:17.658273 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.658275 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.658511 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:17.659021 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:17.659040 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:17.659049 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:17.659056 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:17.661272 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:17.661306 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:17.661315 1401996 round_trippers.go:580]     Audit-Id: 61cb62de-df73-4da2-aa15-e934886c030e
	I0916 10:48:17.661319 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:17.661322 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:17.661325 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:17.661328 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:17.661336 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:17 GMT
	I0916 10:48:17.661515 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:18.156182 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:18.156208 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.156219 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.156223 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.158676 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.158708 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.158724 1401996 round_trippers.go:580]     Audit-Id: 378e2663-2367-4e97-9d34-9dec3d71cad6
	I0916 10:48:18.158730 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.158733 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.158736 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.158740 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.158743 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.158866 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:18.159344 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:18.159361 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.159369 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.159374 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.161347 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:18.161412 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.161436 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.161454 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.161488 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.161493 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.161496 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.161499 1401996 round_trippers.go:580]     Audit-Id: bb959ec1-7e1d-44b4-8591-f88b305c6e05
	I0916 10:48:18.161660 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:18.656149 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:18.656175 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.656185 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.656191 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.658651 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.658682 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.658696 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.658702 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.658706 1401996 round_trippers.go:580]     Audit-Id: 768eca83-3fed-4116-813e-15e61a113bab
	I0916 10:48:18.658710 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.658714 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.658718 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.658859 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"460","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5421 chars]
	I0916 10:48:18.659347 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:18.659365 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:18.659374 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:18.659378 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:18.661522 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:18.661583 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:18.661606 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:18.661626 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:18 GMT
	I0916 10:48:18.661659 1401996 round_trippers.go:580]     Audit-Id: d494015f-8294-4098-b4a2-575432838f60
	I0916 10:48:18.661679 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:18.661697 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:18.661705 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:18.661907 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:19.156120 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910
	I0916 10:48:19.156144 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.156154 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.156158 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.158419 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.158444 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.158452 1401996 round_trippers.go:580]     Audit-Id: 466a2bac-8b73-4458-a018-6e697edd8946
	I0916 10:48:19.158456 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.158459 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.158462 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.158465 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.158468 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.158908 1401996 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-919910","namespace":"kube-system","uid":"80a1c6e8-dcc4-4602-a66a-658796f6ae58","resourceVersion":"508","creationTimestamp":"2024-09-16T10:47:01Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.mirror":"60f2072c6865fb71ef7928175ceb3dad","kubernetes.io/config.seen":"2024-09-16T10:47:01.310180468Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:01Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 5177 chars]
	I0916 10:48:19.159391 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-919910
	I0916 10:48:19.159408 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.159416 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.159420 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.161426 1401996 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:19.161450 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.161459 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.161462 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.161466 1401996 round_trippers.go:580]     Audit-Id: 8ce2e9ae-ed0f-49a1-8a6d-8d61334108e3
	I0916 10:48:19.161474 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.161481 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.161485 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.161628 1401996 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","ap
iVersion":"v1","time":"2024-09-16T10:46:58Z","fieldsType":"FieldsV1","f [truncated 6033 chars]
	I0916 10:48:19.162028 1401996 pod_ready.go:93] pod "kube-scheduler-functional-919910" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:19.162049 1401996 pod_ready.go:82] duration metric: took 13.506931464s for pod "kube-scheduler-functional-919910" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:19.162062 1401996 pod_ready.go:39] duration metric: took 13.87017796s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
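The several hundred lines above are one readiness loop: minikube GETs the kube-scheduler pod (and its node) roughly every 500ms until the pod's Ready condition flips to True. A minimal client-go sketch of that loop, assuming a local kubeconfig path and an illustrative timeout (this is not minikube's actual code):

```go
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig path; adjust for your environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Poll every 500ms, as the timestamps above show, until Ready=True.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := client.CoreV1().Pods("kube-system").Get(ctx, "kube-scheduler-functional-919910", metav1.GetOptions{})
			if err != nil {
				return false, nil // tolerate transient API errors and keep polling
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
	if err != nil {
		panic(err)
	}
	fmt.Println(`pod "kube-scheduler-functional-919910" is Ready`)
}
```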
	I0916 10:48:19.162082 1401996 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:19.162155 1401996 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:19.171973 1401996 command_runner.go:130] > 2790
	I0916 10:48:19.173159 1401996 api_server.go:72] duration metric: took 19.18578314s to wait for apiserver process to appear ...
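Once the pods are Ready, the apiserver process check is a single pgrep whose stdout carries the PID (2790 above). A sketch of the same check with os/exec; minikube runs it over SSH inside the node, while this runs it directly for illustration:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// apiserverPID returns the newest PID whose full command line matches
// the kube-apiserver pattern, as in the pgrep invocation above.
func apiserverPID() (string, error) {
	// -x: exact match, -n: newest process only, -f: match the full command line.
	out, err := exec.Command("sudo", "pgrep", "-xnf", "kube-apiserver.*minikube.*").Output()
	if err != nil {
		return "", fmt.Errorf("kube-apiserver process not found: %w", err)
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	pid, err := apiserverPID()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("kube-apiserver PID:", pid)
}
```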
	I0916 10:48:19.173209 1401996 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:19.173244 1401996 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:19.180753 1401996 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
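The healthz wait is a plain HTTPS GET that counts a 200 response with body "ok" as healthy, which is exactly what the two lines above record. A stdlib sketch; skipping TLS verification is for illustration only, and real callers should trust the CA minikube generates:

```go
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 5 * time.Second,
		// Illustration only: trust the cluster CA in real code.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get("https://192.168.49.2:8441/healthz")
	if err != nil {
		fmt.Println("healthz unreachable:", err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("healthz: %d %s\n", resp.StatusCode, body) // expected: 200 ok
}
```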
	I0916 10:48:19.180850 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/version
	I0916 10:48:19.180863 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.180872 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.180876 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.181761 1401996 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:48:19.181779 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.181799 1401996 round_trippers.go:580]     Content-Length: 263
	I0916 10:48:19.181814 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.181823 1401996 round_trippers.go:580]     Audit-Id: 5f8ceb4a-a192-4ab4-8819-0cc719a145a8
	I0916 10:48:19.181826 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.181829 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.181832 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.181835 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.181852 1401996 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 10:48:19.181976 1401996 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:19.181996 1401996 api_server.go:131] duration metric: took 8.767062ms to wait for apiserver health ...
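The GET /version above is what client-go's discovery client wraps. A sketch that fetches the same document and prints the fields shown in the response body (kubeconfig path assumed):

```go
package main

import (
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Discovery().ServerVersion() issues GET /version under the hood.
	v, err := client.Discovery().ServerVersion()
	if err != nil {
		panic(err)
	}
	fmt.Println(v.GitVersion, v.Platform) // e.g. "v1.31.1 linux/arm64"
}
```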
	I0916 10:48:19.182008 1401996 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:19.182072 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:19.182083 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.182090 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.182095 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.184562 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.184585 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.184594 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.184599 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.184602 1401996 round_trippers.go:580]     Audit-Id: ba5161ea-bc87-4841-a787-f752334f2d4b
	I0916 10:48:19.184605 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.184608 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.184610 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.185526 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"499","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61328 chars]
	I0916 10:48:19.189350 1401996 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:19.189393 1401996 system_pods.go:61] "coredns-7c65d6cfc9-qzn8c" [ada36fb7-8486-4afc-9bef-04ab2e65fc7b] Running
	I0916 10:48:19.189404 1401996 system_pods.go:61] "etcd-functional-919910" [73472289-b523-4c96-8d5d-33ea5c657902] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:19.189409 1401996 system_pods.go:61] "kindnet-nb5xl" [1282e172-7d16-4f24-9f7d-33da705832a9] Running
	I0916 10:48:19.189425 1401996 system_pods.go:61] "kube-apiserver-functional-919910" [82da7bbe-1484-402c-b1a5-7165f1938703] Running
	I0916 10:48:19.189437 1401996 system_pods.go:61] "kube-controller-manager-functional-919910" [483b3e2c-288a-41e1-a29b-33a95b5b536a] Running
	I0916 10:48:19.189442 1401996 system_pods.go:61] "kube-proxy-nvpzv" [2e1bfc3e-dea3-4511-a154-e367e28b0898] Running
	I0916 10:48:19.189446 1401996 system_pods.go:61] "kube-scheduler-functional-919910" [80a1c6e8-dcc4-4602-a66a-658796f6ae58] Running
	I0916 10:48:19.189456 1401996 system_pods.go:61] "storage-provisioner" [2eb6523f-f61a-4c33-8e91-0bbbb874554b] Running
	I0916 10:48:19.189462 1401996 system_pods.go:74] duration metric: took 7.448933ms to wait for pod list to return data ...
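The eight-pod summary above comes from a single List call on kube-system; the etcd line shows how per-container readiness is appended when a pod is Running but not fully Ready. A sketch producing a comparable summary (kubeconfig path assumed):

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pods, err := client.CoreV1().Pods("kube-system").List(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d kube-system pods found\n", len(pods.Items))
	for _, p := range pods.Items {
		// Collect containers that are not yet ready, as in the etcd entry above.
		var notReady []string
		for _, cs := range p.Status.ContainerStatuses {
			if !cs.Ready {
				notReady = append(notReady, cs.Name)
			}
		}
		if len(notReady) > 0 {
			fmt.Printf("%q %s (containers with unready status: %v)\n", p.Name, p.Status.Phase, notReady)
		} else {
			fmt.Printf("%q %s\n", p.Name, p.Status.Phase)
		}
	}
}
```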
	I0916 10:48:19.189477 1401996 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:19.189577 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:19.189586 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.189594 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.189600 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.193300 1401996 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:19.193324 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.193332 1401996 round_trippers.go:580]     Audit-Id: 9800f864-2310-45e2-b31f-6b3b6d7aa7c3
	I0916 10:48:19.193337 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.193341 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.193344 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.193347 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.193349 1401996 round_trippers.go:580]     Content-Length: 261
	I0916 10:48:19.193352 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.193371 1401996 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"36ad6c69-803a-49f9-b31a-556a6fc643b7","resourceVersion":"324","creationTimestamp":"2024-09-16T10:47:06Z"}}]}
	I0916 10:48:19.193544 1401996 default_sa.go:45] found service account: "default"
	I0916 10:48:19.193564 1401996 default_sa.go:55] duration metric: took 4.080003ms for default service account to be created ...
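The default service account wait is a Get on "default"/"default" that treats NotFound as "keep waiting". A short sketch of the same probe:

```go
package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	sa, err := client.CoreV1().ServiceAccounts("default").Get(context.Background(), "default", metav1.GetOptions{})
	if apierrors.IsNotFound(err) {
		fmt.Println("default service account not created yet; retry")
		return
	}
	if err != nil {
		panic(err)
	}
	fmt.Printf("found service account: %q\n", sa.Name)
}
```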
	I0916 10:48:19.193573 1401996 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:19.193634 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:19.193644 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.193651 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.193655 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.196161 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.196184 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.196191 1401996 round_trippers.go:580]     Audit-Id: 8e823e0b-ba03-4d4a-a6eb-7aa20a6ef471
	I0916 10:48:19.196196 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.196200 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.196203 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.196207 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.196210 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.196651 1401996 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-qzn8c","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"ada36fb7-8486-4afc-9bef-04ab2e65fc7b","resourceVersion":"499","creationTimestamp":"2024-09-16T10:47:06Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"d0a0989d-3c55-4e39-bd63-2b9459e552ef","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:06Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"d0a0989d-3c55-4e39-bd63-2b9459e552ef\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61328 chars]
	I0916 10:48:19.199402 1401996 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:19.199435 1401996 system_pods.go:89] "coredns-7c65d6cfc9-qzn8c" [ada36fb7-8486-4afc-9bef-04ab2e65fc7b] Running
	I0916 10:48:19.199446 1401996 system_pods.go:89] "etcd-functional-919910" [73472289-b523-4c96-8d5d-33ea5c657902] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:19.199452 1401996 system_pods.go:89] "kindnet-nb5xl" [1282e172-7d16-4f24-9f7d-33da705832a9] Running
	I0916 10:48:19.199457 1401996 system_pods.go:89] "kube-apiserver-functional-919910" [82da7bbe-1484-402c-b1a5-7165f1938703] Running
	I0916 10:48:19.199462 1401996 system_pods.go:89] "kube-controller-manager-functional-919910" [483b3e2c-288a-41e1-a29b-33a95b5b536a] Running
	I0916 10:48:19.199501 1401996 system_pods.go:89] "kube-proxy-nvpzv" [2e1bfc3e-dea3-4511-a154-e367e28b0898] Running
	I0916 10:48:19.199512 1401996 system_pods.go:89] "kube-scheduler-functional-919910" [80a1c6e8-dcc4-4602-a66a-658796f6ae58] Running
	I0916 10:48:19.199517 1401996 system_pods.go:89] "storage-provisioner" [2eb6523f-f61a-4c33-8e91-0bbbb874554b] Running
	I0916 10:48:19.199525 1401996 system_pods.go:126] duration metric: took 5.941781ms to wait for k8s-apps to be running ...
	I0916 10:48:19.199536 1401996 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:19.199594 1401996 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:19.213096 1401996 system_svc.go:56] duration metric: took 13.539501ms WaitForService to wait for kubelet
	I0916 10:48:19.213126 1401996 kubeadm.go:582] duration metric: took 19.225752653s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
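The kubelet check shells out to systemctl with --quiet and relies on the exit code alone. A sketch of the equivalent local check (the logged invocation also passes a literal `service` token and runs under sudo over SSH; plain `systemctl is-active --quiet kubelet` is the standard form):

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Exit code 0 means the unit is active; --quiet suppresses output.
	err := exec.Command("systemctl", "is-active", "--quiet", "kubelet").Run()
	if err != nil {
		fmt.Println("kubelet service is not active:", err)
		return
	}
	fmt.Println("kubelet service is active")
}
```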
	I0916 10:48:19.213144 1401996 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:19.213231 1401996 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:19.213249 1401996 round_trippers.go:469] Request Headers:
	I0916 10:48:19.213258 1401996 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:19.213262 1401996 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:19.216028 1401996 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:19.216055 1401996 round_trippers.go:577] Response Headers:
	I0916 10:48:19.216063 1401996 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:19.216069 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: dde2938c-b5ca-4a57-b9bd-15f56dda7856
	I0916 10:48:19.216072 1401996 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: bc2a80da-1d56-4954-b760-1948662f5b5f
	I0916 10:48:19.216078 1401996 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:19 GMT
	I0916 10:48:19.216080 1401996 round_trippers.go:580]     Audit-Id: 61db72f3-2af5-41ed-9166-d37dd31f349b
	I0916 10:48:19.216083 1401996 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:19.216221 1401996 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"508"},"items":[{"metadata":{"name":"functional-919910","uid":"53a8f356-7cc5-491d-804d-fdafec0ed62c","resourceVersion":"423","creationTimestamp":"2024-09-16T10:46:58Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-919910","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-919910","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_02_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFie
lds":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time [truncated 6086 chars]
	I0916 10:48:19.216742 1401996 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:19.216776 1401996 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:19.216789 1401996 node_conditions.go:105] duration metric: took 3.638676ms to run NodePressure ...
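The two capacity figures are read straight from Node.Status.Capacity, which is where the 203034800Ki of ephemeral storage and the CPU count above come from. A client-go sketch (kubeconfig path assumed):

```go
package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	nodes, err := client.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, n := range nodes.Items {
		// Copy the quantities out of the map so their String methods are callable.
		storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		fmt.Printf("node %s: ephemeral storage %s, cpu %s\n", n.Name, storage.String(), cpu.String())
	}
}
```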
	I0916 10:48:19.216804 1401996 start.go:241] waiting for startup goroutines ...
	I0916 10:48:19.216814 1401996 start.go:246] waiting for cluster config update ...
	I0916 10:48:19.216825 1401996 start.go:255] writing updated cluster config ...
	I0916 10:48:19.217169 1401996 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:19.225843 1401996 out.go:177] * Done! kubectl is now configured to use "functional-919910" cluster and "default" namespace by default
	E0916 10:48:19.228445 1401996 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
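The closing error is the telling one: "exec format error" from fork/exec almost always means the binary targets a different CPU architecture than the host, for example an amd64 kubectl on this arm64 machine. A quick ELF-header check that confirms such a mismatch (the path matches the log; the check itself is illustrative):

```go
package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

func main() {
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		fmt.Println("cannot read ELF header:", err)
		return
	}
	defer f.Close()
	fmt.Printf("binary machine: %s, host arch: %s\n", f.Machine, runtime.GOARCH)
	// On an arm64 host only EM_AARCH64 binaries are executable;
	// anything else reproduces the "exec format error" above.
	if f.Machine != elf.EM_AARCH64 {
		fmt.Println("architecture mismatch detected")
	}
}
```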
	
	
	==> CRI-O <==
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.965477243Z" level=info msg="Started container" PID=2806 containerID=8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715 description=kube-system/kube-controller-manager-functional-919910/kube-controller-manager id=cbbd8d70-0ef6-40ee-b91a-d54d8cb4b085 name=/runtime.v1.RuntimeService/StartContainer sandboxID=0ffab32638624e8f0235604afb94e9e67c3d4e06616208483a5debcc914e3cae
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.967789570Z" level=info msg="Started container" PID=2790 containerID=84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28 description=kube-system/kube-apiserver-functional-919910/kube-apiserver id=9dee0322-7a01-42bd-a3f5-c48572b8a62c name=/runtime.v1.RuntimeService/StartContainer sandboxID=8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.967919766Z" level=info msg="Started container" PID=2872 containerID=e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18 description=kube-system/kindnet-nb5xl/kindnet-cni id=30da5d1a-8366-4392-8ba6-af4109e8c65e name=/runtime.v1.RuntimeService/StartContainer sandboxID=306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f
	Sep 16 10:48:00 functional-919910 crio[2450]: time="2024-09-16 10:48:00.969673714Z" level=info msg="Started container" PID=2845 containerID=2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7 description=kube-system/coredns-7c65d6cfc9-qzn8c/coredns id=7f4d1327-f970-4e92-bbc9-1bce58c06e61 name=/runtime.v1.RuntimeService/StartContainer sandboxID=4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.157628516Z" level=info msg="Created container 68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7: kube-system/kube-proxy-nvpzv/kube-proxy" id=134ba4ed-d336-400c-be83-73f40fc44f23 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.158332925Z" level=info msg="Starting container: 68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7" id=b2a96f8b-787c-4a6e-abb3-407e88bf0a34 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.176145203Z" level=info msg="Created container 67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be: kube-system/storage-provisioner/storage-provisioner" id=a8e0d59f-1513-42fa-b3e0-e75ed88b690c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.176991335Z" level=info msg="Starting container: 67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be" id=64aa8b98-953a-42de-a047-e95022a4fd7d name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.192325137Z" level=info msg="Started container" PID=2885 containerID=67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be description=kube-system/storage-provisioner/storage-provisioner id=64aa8b98-953a-42de-a047-e95022a4fd7d name=/runtime.v1.RuntimeService/StartContainer sandboxID=e27809ba106031f0a2ea1939eccfaa14ca2ade78903409cc767b25e9de7c812a
	Sep 16 10:48:01 functional-919910 crio[2450]: time="2024-09-16 10:48:01.197212586Z" level=info msg="Started container" PID=2841 containerID=68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7 description=kube-system/kube-proxy-nvpzv/kube-proxy id=b2a96f8b-787c-4a6e-abb3-407e88bf0a34 name=/runtime.v1.RuntimeService/StartContainer sandboxID=46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.487147142Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491166117Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491204975Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.491221582Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494833329Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494874403Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.494889976Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.497990869Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.498028357Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.498045604Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501096176Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501132680Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.501148638Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.504048544Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:48:11 functional-919910 crio[2450]: time="2024-09-16 10:48:11.504087410Z" level=info msg="Updated default CNI network name to kindnet"
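
The CNI monitoring events above show kindnet updating its network configuration atomically: it creates and writes 10-kindnet.conflist.temp, then renames it over 10-kindnet.conflist, and CRI-O re-reads the default network after each event. To inspect the resulting config on the node, commands along these lines would work (illustrative, not captured in this run):

    minikube ssh -p functional-919910 -- ls /etc/cni/net.d/
    minikube ssh -p functional-919910 -- sudo cat /etc/cni/net.d/10-kindnet.conflist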
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   22 seconds ago       Running             storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   22 seconds ago       Running             kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   22 seconds ago       Running             kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   22 seconds ago       Running             coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	84ca31fb2ed03       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   22 seconds ago       Running             kube-apiserver            1                   8fd62fbc34bf1       kube-apiserver-functional-919910
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   22 seconds ago       Running             kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   22 seconds ago       Running             etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   22 seconds ago       Running             kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	89084e33c979a       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   35 seconds ago       Exited              coredns                   0                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	584cffa44f327       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   35 seconds ago       Exited              storage-provisioner       0                   e27809ba10603       storage-provisioner
	9fdab793eb970       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   About a minute ago   Exited              kube-proxy                0                   46672cf6a1a3c       kube-proxy-nvpzv
	3e31d247381fd       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   About a minute ago   Exited              kindnet-cni               0                   306886331d6ee       kindnet-nb5xl
	6d211253a1170       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   About a minute ago   Exited              kube-scheduler            0                   00455a328acb5       kube-scheduler-functional-919910
	19cb8b26283b5       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   About a minute ago   Exited              kube-controller-manager   0                   0ffab32638624       kube-controller-manager-functional-919910
	b88a79882d73e       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   About a minute ago   Exited              etcd                      0                   46079181d2925       etcd-functional-919910
	790d8c6b7f5cf       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   About a minute ago   Exited              kube-apiserver            0                   8fd62fbc34bf1       kube-apiserver-functional-919910
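
This table has the shape of crictl output and could be reproduced on the node with something like the following (illustrative, not captured in this run):

    minikube ssh -p functional-919910 -- sudo crictl ps -a

The Exited attempt-0 rows paired with Running attempt-1 rows for the same pod IDs are consistent with the control plane having been restarted in place roughly 22 seconds before this snapshot was taken.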
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	
	
	==> coredns [89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:48552 - 42859 "HINFO IN 442387380457581256.8648752210731241523. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.057702013s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:48:15 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:47:47 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     77s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         83s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      77s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         82s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         82s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         77s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         82s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         76s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 75s   kube-proxy       
	  Normal   Starting                 17s   kube-proxy       
	  Normal   Starting                 82s   kubelet          Starting kubelet.
	  Warning  CgroupV1                 82s   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  82s   kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    82s   kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     82s   kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           78s   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                36s   kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           15s   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
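
As a sanity check, the CPU request total above follows directly from the pod table: 100m (coredns) + 100m (etcd) + 100m (kindnet) + 250m (kube-apiserver) + 200m (kube-controller-manager) + 100m (kube-scheduler) + 0m (kube-proxy) + 0m (storage-provisioner) = 850m, i.e. 850/2000 = 42.5% of the 2-CPU node, truncated to the 42% shown. The 220Mi memory figure likewise sums the 70Mi, 100Mi and 50Mi requests.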
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:01.207309Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:01.207390Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:01.207421Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:01.214478Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:01.216143Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:01.229380Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:01.229419Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:01.229922Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:01.229979Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:02.360729Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	
	
	==> etcd [b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093] <==
	{"level":"info","ts":"2024-09-16T10:46:55.514339Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:46:55.514365Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:46:55.520762Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.523524Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:46:55.528689Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:46:55.529056Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:46:55.529228Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:46:55.529273Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:46:55.529930Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:46:55.530896Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:46:55.531588Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:46:55.538109Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:46:55.537145Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.560810Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:46:55.561697Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:51.971408Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:47:51.971462Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:47:51.971540Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:51.971636Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:52.033719Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:47:52.033852Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:47:52.033933Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:47:52.036245Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:52.036424Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:52.036463Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:48:23 up 10:30,  0 users,  load average: 1.83, 1.33, 1.67
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9] <==
	W0916 10:47:37.742874       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.743031       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743158       1 trace.go:236] Trace[521782442]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30000ms):
	Trace[521782442]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.743)
	Trace[521782442]: [30.0005021s] [30.0005021s] END
	E0916 10:47:37.743220       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743054       1 trace.go:236] Trace[974010787]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.741) (total time: 30001ms):
	Trace[974010787]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:47:37.742)
	Trace[974010787]: [30.001379879s] [30.001379879s] END
	E0916 10:47:37.743260       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.742973       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743314       1 trace.go:236] Trace[1827800835]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30000ms):
	Trace[1827800835]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.742)
	Trace[1827800835]: [30.000860294s] [30.000860294s] END
	E0916 10:47:37.743332       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:47:37.742868       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:37.743361       1 trace.go:236] Trace[215844066]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:47:07.742) (total time: 30001ms):
	Trace[215844066]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:47:37.742)
	Trace[215844066]: [30.001155081s] [30.001155081s] END
	E0916 10:47:37.743369       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:47:39.343940       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:39.343985       1 metrics.go:61] Registering metrics
	I0916 10:47:39.344036       1 controller.go:374] Syncing nftables rules
	I0916 10:47:47.742951       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:47.743092       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	
	
	==> kube-apiserver [790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75] <==
	W0916 10:47:52.019895       1 logging.go:55] [core] [Channel #94 SubChannel #95]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.019932       1 logging.go:55] [core] [Channel #25 SubChannel #26]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.019999       1 logging.go:55] [core] [Channel #142 SubChannel #143]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020055       1 logging.go:55] [core] [Channel #184 SubChannel #185]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020090       1 logging.go:55] [core] [Channel #163 SubChannel #164]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020151       1 logging.go:55] [core] [Channel #178 SubChannel #179]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020210       1 logging.go:55] [core] [Channel #40 SubChannel #41]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020272       1 logging.go:55] [core] [Channel #88 SubChannel #89]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020333       1 logging.go:55] [core] [Channel #100 SubChannel #101]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020367       1 logging.go:55] [core] [Channel #130 SubChannel #131]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020210       1 logging.go:55] [core] [Channel #52 SubChannel #53]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020275       1 logging.go:55] [core] [Channel #139 SubChannel #140]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020486       1 logging.go:55] [core] [Channel #181 SubChannel #182]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020542       1 logging.go:55] [core] [Channel #64 SubChannel #65]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020596       1 logging.go:55] [core] [Channel #91 SubChannel #92]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020631       1 logging.go:55] [core] [Channel #5 SubChannel #6]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020658       1 logging.go:55] [core] [Channel #46 SubChannel #47]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020892       1 logging.go:55] [core] [Channel #169 SubChannel #170]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020542       1 logging.go:55] [core] [Channel #115 SubChannel #116]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020971       1 logging.go:55] [core] [Channel #112 SubChannel #113]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021029       1 logging.go:55] [core] [Channel #106 SubChannel #107]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021078       1 logging.go:55] [core] [Channel #118 SubChannel #119]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021259       1 logging.go:55] [core] [Channel #49 SubChannel #50]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.021317       1 logging.go:55] [core] [Channel #37 SubChannel #38]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	W0916 10:47:52.020062       1 logging.go:55] [core] [Channel #121 SubChannel #122]grpc: addrConn.createTransport failed to connect to {Addr: "127.0.0.1:2379", ServerName: "127.0.0.1:2379", }. Err: connection error: desc = "transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused"
	
	
	==> kube-apiserver [84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28] <==
	I0916 10:48:04.920985       1 nonstructuralschema_controller.go:195] Starting NonStructuralSchemaConditionController
	I0916 10:48:04.921001       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 10:48:04.921012       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 10:48:04.921551       1 dynamic_cafile_content.go:160] "Starting controller" name="client-ca-bundle::/var/lib/minikube/certs/ca.crt"
	I0916 10:48:04.921665       1 dynamic_cafile_content.go:160] "Starting controller" name="request-header::/var/lib/minikube/certs/front-proxy-ca.crt"
	I0916 10:48:05.328418       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:05.333994       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:05.334017       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:05.334025       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:05.402697       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:05.402737       1 policy_source.go:224] refreshing policies
	I0916 10:48:05.412636       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:05.413378       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:05.413700       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:48:05.413719       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:05.413984       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:05.414029       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:05.420744       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:05.434267       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:05.434551       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:05.442758       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:05.448233       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 10:48:05.573273       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:05.963340       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:22.953992       1 controller.go:615] quota admission added evaluator for: endpoints
	
	
	==> kube-controller-manager [19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced] <==
	I0916 10:47:05.622532       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-kubelet-client
	I0916 10:47:05.622583       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-kube-apiserver-client
	I0916 10:47:05.622580       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-legacy-unknown
	I0916 10:47:05.625844       1 shared_informer.go:320] Caches are synced for certificate-csrapproving
	I0916 10:47:05.681754       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:05.685862       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:06.120562       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:06.128166       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:06.128203       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:06.298714       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:06.412493       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="928.095132ms"
	I0916 10:47:06.436112       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="23.562472ms"
	I0916 10:47:06.436311       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="76.216µs"
	I0916 10:47:06.436431       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="29.243µs"
	I0916 10:47:07.569236       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.056158ms"
	I0916 10:47:07.583187       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="13.90119ms"
	I0916 10:47:07.583320       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.289µs"
	I0916 10:47:47.904049       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:47.921959       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:47:47.932048       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="55.187µs"
	I0916 10:47:47.946797       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="60.782µs"
	I0916 10:47:48.553366       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="11.363114ms"
	I0916 10:47:48.553489       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="53.668µs"
	I0916 10:47:50.460452       1 node_lifecycle_controller.go:1055] "Controller detected that some Nodes are Ready. Exiting master disruption mode" logger="node-lifecycle-controller"
	I0916 10:47:50.460790       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.527001       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"functional-919910\" does not exist"
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5] <==
	I0916 10:47:07.571834       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:07.773307       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:07.773379       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:07.796962       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:07.797030       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:07.798928       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:07.799225       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:07.799259       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:07.800857       1 config.go:199] "Starting service config controller"
	I0916 10:47:07.800897       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:07.800943       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:07.800953       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:07.801557       1 config.go:328] "Starting node config controller"
	I0916 10:47:07.801619       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:07.901018       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:47:07.901079       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:07.903369       1 shared_informer.go:320] Caches are synced for node config
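
Both kube-proxy generations log the same configuration warning about nodePortAddresses being unset. As the message itself suggests, restricting NodePort traffic to the node's primary IPs would mean passing the flag it names (a sketch, not applied in this run):

    --nodeport-addresses=primary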
	
	
	==> kube-scheduler [6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49] <==
	E0916 10:46:58.939724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.939898       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.939956       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940085       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:46:58.940140       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940253       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:46:58.940307       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940426       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.940479       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940577       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:58.940630       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940734       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:46:58.940791       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:58.940756       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:46:58.940890       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.748051       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:59.748098       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.797427       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:46:59.797568       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:46:59.814459       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:46:59.814584       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:00.394732       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:00.394804       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0916 10:47:02.116962       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:47:51.976491       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.563349    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.580042    1525 scope.go:117] "RemoveContainer" containerID="9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.581495    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.584123    1525 scope.go:117] "RemoveContainer" containerID="3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.588218    1525 scope.go:117] "RemoveContainer" containerID="584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.589441    1525 scope.go:117] "RemoveContainer" containerID="89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.592539    1525 status_manager.go:851] "Failed to get status for pod" podUID="60f2072c6865fb71ef7928175ceb3dad" pod="kube-system/kube-scheduler-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.593132    1525 status_manager.go:851] "Failed to get status for pod" podUID="3e910b182a705a484fdc6733177892d1" pod="kube-system/etcd-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.599626    1525 status_manager.go:851] "Failed to get status for pod" podUID="3d8a6ba31c18f33c5660170029e5cde1" pod="kube-system/kube-apiserver-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.603888    1525 status_manager.go:851] "Failed to get status for pod" podUID="2e1bfc3e-dea3-4511-a154-e367e28b0898" pod="kube-system/kube-proxy-nvpzv" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.604793    1525 status_manager.go:851] "Failed to get status for pod" podUID="ada36fb7-8486-4afc-9bef-04ab2e65fc7b" pod="kube-system/coredns-7c65d6cfc9-qzn8c" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-qzn8c\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.618533    1525 status_manager.go:851] "Failed to get status for pod" podUID="2eb6523f-f61a-4c33-8e91-0bbbb874554b" pod="kube-system/storage-provisioner" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/storage-provisioner\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.618991    1525 status_manager.go:851] "Failed to get status for pod" podUID="bcfd044776fa163108ac9ce9912dd1b1" pod="kube-system/kube-controller-manager-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619364    1525 status_manager.go:851] "Failed to get status for pod" podUID="60f2072c6865fb71ef7928175ceb3dad" pod="kube-system/kube-scheduler-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619674    1525 status_manager.go:851] "Failed to get status for pod" podUID="3e910b182a705a484fdc6733177892d1" pod="kube-system/etcd-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.619915    1525 status_manager.go:851] "Failed to get status for pod" podUID="3d8a6ba31c18f33c5660170029e5cde1" pod="kube-system/kube-apiserver-functional-919910" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-919910\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.620147    1525 status_manager.go:851] "Failed to get status for pod" podUID="2e1bfc3e-dea3-4511-a154-e367e28b0898" pod="kube-system/kube-proxy-nvpzv" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-nvpzv\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:00 functional-919910 kubelet[1525]: I0916 10:48:00.620368    1525 status_manager.go:851] "Failed to get status for pod" podUID="1282e172-7d16-4f24-9f7d-33da705832a9" pod="kube-system/kindnet-nb5xl" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kindnet-nb5xl\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:01 functional-919910 kubelet[1525]: E0916 10:48:01.441856    1525 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483681441651840,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:01 functional-919910 kubelet[1525]: E0916 10:48:01.441896    1525 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483681441651840,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:05 functional-919910 kubelet[1525]: E0916 10:48:05.003719    1525 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError"
	Sep 16 10:48:11 functional-919910 kubelet[1525]: E0916 10:48:11.442879    1525 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483691442663916,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:11 functional-919910 kubelet[1525]: E0916 10:48:11.442932    1525 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483691442663916,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:21 functional-919910 kubelet[1525]: E0916 10:48:21.444337    1525 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483701444023967,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:21 functional-919910 kubelet[1525]: E0916 10:48:21.444374    1525 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483701444023967,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf] <==
	I0916 10:47:48.421537       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:47:48.454695       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:47:48.454750       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:47:48.463044       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:47:48.463305       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3!
	I0916 10:47:48.464240       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"414", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3 became leader
	I0916 10:47:48.563893       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_9fff52e8-492a-4bd9-921f-e0e8a999d2a3!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (562.8µs)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/KubectlGetPods (2.86s)
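Note: "fork/exec /usr/local/bin/kubectl: exec format error" is the kernel's ENOEXEC, which on this arm64 runner most likely means the kubectl binary at that path was built for a different architecture (e.g. amd64). A minimal Go sketch for confirming such a mismatch, assuming the binary is an ELF executable at the path reported in the failure (the same check can be done with file(1)):

package main

import (
	"debug/elf"
	"fmt"
	"os"
	"runtime"
)

func main() {
	// Path taken from the failure message above.
	const path = "/usr/local/bin/kubectl"

	f, err := elf.Open(path)
	if err != nil {
		fmt.Fprintf(os.Stderr, "cannot read %s as ELF: %v\n", path, err)
		os.Exit(1)
	}
	defer f.Close()

	// Map the host GOARCH to the ELF machine type we expect to find.
	expected := map[string]elf.Machine{
		"amd64": elf.EM_X86_64,
		"arm64": elf.EM_AARCH64,
	}[runtime.GOARCH]

	fmt.Printf("binary machine: %v, host GOARCH: %s\n", f.Machine, runtime.GOARCH)
	if expected != 0 && f.Machine != expected {
		fmt.Println("architecture mismatch: exec would fail with ENOEXEC (exec format error)")
	}
}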

x
+
TestFunctional/serial/ComponentHealth (2.49s)

=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:810: (dbg) Run:  kubectl --context functional-919910 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:810: (dbg) Non-zero exit: kubectl --context functional-919910 get po -l tier=control-plane -n kube-system -o=json: fork/exec /usr/local/bin/kubectl: exec format error (1.879353ms)
functional_test.go:812: failed to get components. args "kubectl --context functional-919910 get po -l tier=control-plane -n kube-system -o=json": fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/ComponentHealth]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/serial/ComponentHealth FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/ComponentHealth]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (1.757346325s)
helpers_test.go:252: TestFunctional/serial/ComponentHealth logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| unpause | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause                                               |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause                                               |                   |         |         |                     |                     |
	| unpause | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 unpause                                               |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop                                                  |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop                                                  |                   |         |         |                     |                     |
	| stop    | nospam-329014 --log_dir                                                  | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|         | /tmp/nospam-329014 stop                                                  |                   |         |         |                     |                     |
	| delete  | -p nospam-329014                                                         | nospam-329014     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	| start   | -p functional-919910                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:47 UTC |
	|         | --memory=4000                                                            |                   |         |         |                     |                     |
	|         | --apiserver-port=8441                                                    |                   |         |         |                     |                     |
	|         | --wait=all --driver=docker                                               |                   |         |         |                     |                     |
	|         | --container-runtime=crio                                                 |                   |         |         |                     |                     |
	| start   | -p functional-919910                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|         | --alsologtostderr -v=8                                                   |                   |         |         |                     |                     |
	| cache   | functional-919910 cache add                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | functional-919910 cache add                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |         |         |                     |                     |
	| cache   | functional-919910 cache add                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | functional-919910 cache add                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | minikube-local-cache-test:functional-919910                              |                   |         |         |                     |                     |
	| cache   | functional-919910 cache delete                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | minikube-local-cache-test:functional-919910                              |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |         |         |                     |                     |
	| cache   | list                                                                     | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-919910 ssh sudo                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | crictl images                                                            |                   |         |         |                     |                     |
	| ssh     | functional-919910                                                        | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | ssh sudo crictl rmi                                                      |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC |                     |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | functional-919910 cache reload                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-919910 ssh                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| kubectl | functional-919910 kubectl --                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --context functional-919910                                              |                   |         |         |                     |                     |
	|         | get pods                                                                 |                   |         |         |                     |                     |
	| start   | -p functional-919910                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:49 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:33
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:33.910735 1405153 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:33.910918 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.910922 1405153 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:33.910927 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.911187 1405153 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:48:33.911553 1405153 out.go:352] Setting JSON to false
	I0916 10:48:33.912603 1405153 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37859,"bootTime":1726445855,"procs":174,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:48:33.912669 1405153 start.go:139] virtualization:  
	I0916 10:48:33.916414 1405153 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:33.921226 1405153 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:33.921310 1405153 notify.go:220] Checking for updates...
	I0916 10:48:33.924192 1405153 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:33.926482 1405153 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:48:33.928824 1405153 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:48:33.930290 1405153 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:33.932494 1405153 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:33.936794 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:33.936883 1405153 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:33.971073 1405153 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:33.971194 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.033300 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.023431603 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.033460 1405153 docker.go:318] overlay module found
	I0916 10:48:34.036356 1405153 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:34.038192 1405153 start.go:297] selected driver: docker
	I0916 10:48:34.038228 1405153 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.038379 1405153 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:34.038507 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.092789 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.082522334 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.093226 1405153 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:34.093254 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:34.093309 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:34.093355 1405153 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.098551 1405153 out.go:177] * Starting "functional-919910" primary control-plane node in "functional-919910" cluster
	I0916 10:48:34.100896 1405153 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:48:34.103035 1405153 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:34.104428 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:34.104490 1405153 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:48:34.104498 1405153 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:34.104519 1405153 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:34.104600 1405153 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:34.104608 1405153 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:48:34.104827 1405153 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/config.json ...
	W0916 10:48:34.124223 1405153 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:34.124234 1405153 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:34.124328 1405153 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:34.124345 1405153 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:34.124350 1405153 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:34.124357 1405153 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:34.124362 1405153 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:34.288917 1405153 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:34.288951 1405153 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:34.288982 1405153 start.go:360] acquireMachinesLock for functional-919910: {Name:mkddf275897a7528274aa0390d95d40845ffb1ab Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:34.289059 1405153 start.go:364] duration metric: took 54.522µs to acquireMachinesLock for "functional-919910"
	I0916 10:48:34.289080 1405153 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:34.289084 1405153 fix.go:54] fixHost starting: 
	I0916 10:48:34.289431 1405153 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:48:34.305622 1405153 fix.go:112] recreateIfNeeded on functional-919910: state=Running err=<nil>
	W0916 10:48:34.305643 1405153 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:34.310594 1405153 out.go:177] * Updating the running docker "functional-919910" container ...
	I0916 10:48:34.313486 1405153 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:34.313608 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.330698 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.331000 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.331007 1405153 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:34.472292 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.472306 1405153 ubuntu.go:169] provisioning hostname "functional-919910"
	I0916 10:48:34.472377 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.490933 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.491182 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.491193 1405153 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-919910 && echo "functional-919910" | sudo tee /etc/hostname
	I0916 10:48:34.642095 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.642170 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.661757 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.662006 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.662021 1405153 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-919910' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-919910/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-919910' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:34.801384 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:48:34.801403 1405153 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:48:34.801426 1405153 ubuntu.go:177] setting up certificates
	I0916 10:48:34.801435 1405153 provision.go:84] configureAuth start
	I0916 10:48:34.801501 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:34.820918 1405153 provision.go:143] copyHostCerts
	I0916 10:48:34.820978 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:48:34.820986 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:48:34.821065 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:48:34.821168 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:48:34.821172 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:48:34.821197 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:48:34.821249 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:48:34.821252 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:48:34.821274 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:48:34.821320 1405153 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.functional-919910 san=[127.0.0.1 192.168.49.2 functional-919910 localhost minikube]
	I0916 10:48:35.568371 1405153 provision.go:177] copyRemoteCerts
	I0916 10:48:35.568431 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:35.568472 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.587212 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:35.685877 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:48:35.711210 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:35.737883 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:35.763778 1405153 provision.go:87] duration metric: took 962.330031ms to configureAuth
	I0916 10:48:35.763796 1405153 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:35.763993 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:35.764136 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.780995 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:35.781225 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:35.781237 1405153 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:48:41.213530 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:48:41.213542 1405153 machine.go:96] duration metric: took 6.900045274s to provisionDockerMachine
	I0916 10:48:41.213553 1405153 start.go:293] postStartSetup for "functional-919910" (driver="docker")
	I0916 10:48:41.213563 1405153 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:41.213629 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:41.213668 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.239640 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.342947 1405153 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:41.346242 1405153 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:41.346267 1405153 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:41.346276 1405153 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:41.346282 1405153 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:41.346292 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:48:41.346355 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:48:41.346443 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:48:41.346519 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> hosts in /etc/test/nested/copy/1383833
	I0916 10:48:41.346565 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/1383833
	I0916 10:48:41.355661 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:41.381460 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts --> /etc/test/nested/copy/1383833/hosts (40 bytes)
	I0916 10:48:41.406699 1405153 start.go:296] duration metric: took 193.131275ms for postStartSetup
	I0916 10:48:41.406787 1405153 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:41.406826 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.424693 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.518715 1405153 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:41.524705 1405153 fix.go:56] duration metric: took 7.235611151s for fixHost
	I0916 10:48:41.524721 1405153 start.go:83] releasing machines lock for "functional-919910", held for 7.235654293s
	I0916 10:48:41.524821 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:41.541935 1405153 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:41.541984 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.542020 1405153 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:41.542086 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.561069 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.570434 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.656471 1405153 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:41.785895 1405153 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:48:41.929425 1405153 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:41.933963 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.943404 1405153 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:41.943491 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.953183 1405153 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:41.953198 1405153 start.go:495] detecting cgroup driver to use...
	I0916 10:48:41.953235 1405153 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:41.953293 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:48:41.966190 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:48:41.978010 1405153 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:41.978067 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:41.992752 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:42.006013 1405153 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:42.151230 1405153 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:42.299523 1405153 docker.go:233] disabling docker service ...
	I0916 10:48:42.299589 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:42.315054 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:42.329122 1405153 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:42.464786 1405153 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:42.586432 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:42.599418 1405153 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:42.615905 1405153 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:48:42.615978 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.625827 1405153 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:48:42.625890 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.636100 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.646444 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.656847 1405153 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:42.666724 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.676826 1405153 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.687635 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.698381 1405153 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:42.707000 1405153 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:42.715679 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:42.847692 1405153 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:48:43.038937 1405153 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:48:43.039005 1405153 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:48:43.042800 1405153 start.go:563] Will wait 60s for crictl version
	I0916 10:48:43.042867 1405153 ssh_runner.go:195] Run: which crictl
	I0916 10:48:43.046213 1405153 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:43.092243 1405153 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:48:43.092320 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.132595 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.181201 1405153 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:48:43.184095 1405153 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:43.199863 1405153 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:43.206470 1405153 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:43.209330 1405153 kubeadm.go:883] updating cluster {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:2621
44 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:43.209456 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:43.209542 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.254938 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.254950 1405153 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:48:43.255048 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.291546 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.291559 1405153 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:43.291565 1405153 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 crio true true} ...
	I0916 10:48:43.291676 1405153 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=functional-919910 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:43.291761 1405153 ssh_runner.go:195] Run: crio config
	I0916 10:48:43.340850 1405153 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:43.340962 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:43.340975 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:43.340984 1405153 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:43.341006 1405153 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-919910 NodeName:functional-919910 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:ma
p[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:43.341148 1405153 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "functional-919910"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:43.341215 1405153 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:43.350412 1405153 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:43.350476 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:43.361154 1405153 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (367 bytes)
	I0916 10:48:43.380083 1405153 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:43.398316 1405153 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2005 bytes)
	I0916 10:48:43.417671 1405153 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:43.421406 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:43.572628 1405153 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:43.587702 1405153 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910 for IP: 192.168.49.2
	I0916 10:48:43.587714 1405153 certs.go:194] generating shared ca certs ...
	I0916 10:48:43.587731 1405153 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:43.587872 1405153 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:48:43.587922 1405153 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:48:43.587928 1405153 certs.go:256] generating profile certs ...
	I0916 10:48:43.588013 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key
	I0916 10:48:43.588061 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key.debd5ef9
	I0916 10:48:43.588099 1405153 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key
	I0916 10:48:43.588211 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:48:43.588269 1405153 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:43.588278 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:43.588301 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:48:43.588323 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:43.588343 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:48:43.588383 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:43.589063 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:43.615227 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:48:43.640209 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:43.665409 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:48:43.690396 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:43.715720 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:43.741758 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:43.766512 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:43.790812 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:43.815736 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:48:43.840930 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:48:43.866622 1405153 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:43.885132 1405153 ssh_runner.go:195] Run: openssl version
	I0916 10:48:43.890673 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:43.900521 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904289 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904360 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.912159 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:43.921601 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:48:43.931422 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935145 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935204 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.942725 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:43.952136 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:48:43.962336 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966066 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966132 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.973393 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:43.983388 1405153 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:43.987432 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:43.994465 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:44.005212 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:44.014973 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:44.023352 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:44.030882 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:44.038292 1405153 kubeadm.go:392] StartCluster: {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144
MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:44.038381 1405153 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:44.038447 1405153 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:44.078250 1405153 cri.go:89] found id: "67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be"
	I0916 10:48:44.078263 1405153 cri.go:89] found id: "e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18"
	I0916 10:48:44.078267 1405153 cri.go:89] found id: "68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7"
	I0916 10:48:44.078269 1405153 cri.go:89] found id: "2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7"
	I0916 10:48:44.078272 1405153 cri.go:89] found id: "84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28"
	I0916 10:48:44.078275 1405153 cri.go:89] found id: "8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715"
	I0916 10:48:44.078278 1405153 cri.go:89] found id: "5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb"
	I0916 10:48:44.078281 1405153 cri.go:89] found id: "9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98"
	I0916 10:48:44.078283 1405153 cri.go:89] found id: "89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	I0916 10:48:44.078289 1405153 cri.go:89] found id: "584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	I0916 10:48:44.078292 1405153 cri.go:89] found id: "9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	I0916 10:48:44.078294 1405153 cri.go:89] found id: "3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	I0916 10:48:44.078297 1405153 cri.go:89] found id: "6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49"
	I0916 10:48:44.078299 1405153 cri.go:89] found id: "19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced"
	I0916 10:48:44.078303 1405153 cri.go:89] found id: "b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093"
	I0916 10:48:44.078305 1405153 cri.go:89] found id: "790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75"
	I0916 10:48:44.078307 1405153 cri.go:89] found id: ""
	I0916 10:48:44.078373 1405153 ssh_runner.go:195] Run: sudo runc list -f json
	
	
	==> CRI-O <==
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.535257149Z" level=info msg="Created container 072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1: kube-system/kindnet-nb5xl/kindnet-cni" id=3ddce0db-6c08-49cb-afc0-95165b9a8d93 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.535949874Z" level=info msg="Starting container: 072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1" id=2d47cfa2-1c51-4e3e-84c6-20565a25b42f name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.544030939Z" level=info msg="Created container 6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db: kube-system/kube-proxy-nvpzv/kube-proxy" id=3a06185c-7b19-4148-a31f-d6fd8357f9ea name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.544745095Z" level=info msg="Starting container: 6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db" id=0568d2b4-f47d-4f64-b678-a50f76abf9da name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546185920Z" level=info msg="Created container 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb: kube-system/coredns-7c65d6cfc9-qzn8c/coredns" id=1d08d23d-6b30-4ef9-9749-75656f2f22fb name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546884840Z" level=info msg="Starting container: 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb" id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.550182971Z" level=info msg="Started container" PID=5232 containerID=072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1 description=kube-system/kindnet-nb5xl/kindnet-cni id=2d47cfa2-1c51-4e3e-84c6-20565a25b42f name=/runtime.v1.RuntimeService/StartContainer sandboxID=306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.573383165Z" level=info msg="Started container" PID=5218 containerID=4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb description=kube-system/coredns-7c65d6cfc9-qzn8c/coredns id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer sandboxID=4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.605671989Z" level=info msg="Started container" PID=5240 containerID=6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db description=kube-system/kube-proxy-nvpzv/kube-proxy id=0568d2b4-f47d-4f64-b678-a50f76abf9da name=/runtime.v1.RuntimeService/StartContainer sandboxID=46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.088292803Z" level=info msg="Stopping pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.089356144Z" level=info msg="Stopped pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.162210356Z" level=info msg="Removing container: 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.186675643Z" level=info msg="Removed container 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28: kube-system/kube-apiserver-functional-919910/kube-apiserver" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.038404155Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042553703Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042594104Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042618596Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046006841Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046045847Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046064399Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049550759Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049587057Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049603935Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053090451Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053131229Z" level=info msg="Updated default CNI network name to kindnet"
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	6f8c0a2f9d3e9       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   18 seconds ago       Running             kube-proxy                2                   46672cf6a1a3c       kube-proxy-nvpzv
	072cecfbf1d39       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   18 seconds ago       Running             kindnet-cni               2                   306886331d6ee       kindnet-nb5xl
	4deb5cc6dce54       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   18 seconds ago       Running             coredns                   2                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	0318f459801da       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   18 seconds ago       Running             storage-provisioner       2                   e27809ba10603       storage-provisioner
	12f0a29c7ca2a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   23 seconds ago       Running             kube-apiserver            0                   00a81472718e2       kube-apiserver-functional-919910
	7fcb94c0bce84       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   23 seconds ago       Running             kube-scheduler            2                   00455a328acb5       kube-scheduler-functional-919910
	d69895ce029ae       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   23 seconds ago       Running             kube-controller-manager   2                   0ffab32638624       kube-controller-manager-functional-919910
	0fb814efa9ee9       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   23 seconds ago       Running             etcd                      2                   46079181d2925       etcd-functional-919910
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   About a minute ago   Exited              storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   About a minute ago   Exited              kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   About a minute ago   Exited              kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   About a minute ago   Exited              coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   About a minute ago   Exited              kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   About a minute ago   Exited              etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   About a minute ago   Exited              kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:38507 - 51569 "HINFO IN 5479759435856645223.8976423270861566953. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013474675s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:49:01 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     2m4s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m10s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m4s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         18s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m9s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m4s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m9s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m3s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 2m3s               kube-proxy       
	  Normal   Starting                 18s                kube-proxy       
	  Normal   Starting                 64s                kube-proxy       
	  Warning  CgroupV1                 2m9s               kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m9s               kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m9s               kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m9s               kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   Starting                 2m9s               kubelet          Starting kubelet.
	  Normal   RegisteredNode           2m5s               node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                83s                kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           62s                node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeHasSufficientMemory  23s (x8 over 23s)  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 23s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 23s                kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    23s (x8 over 23s)  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     23s (x7 over 23s)  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           16s                node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [0fb814efa9ee90e98aaa699004b013bf5a6a31aa8325e33f52783fa123bcc384] <==
	{"level":"info","ts":"2024-09-16T10:48:47.877008Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:48:47.877076Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:47.877156Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.877188Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.899347Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:47.899570Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:47.899599Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:47.899714Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:47.899728Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:49.036711Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036850Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036905Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.036978Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037063Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.040899Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:49.041104Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.042103Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.043175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.043511Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.045434Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.046389Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.045527Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:49.050654Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:35.943802Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:35.943843Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:35.943911Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.943938Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945041Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945137Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:35.990678Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:35.995430Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995621Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995642Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:49:11 up 10:31,  0 users,  load average: 1.96, 1.46, 1.70
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1] <==
	I0916 10:48:52.626787       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:52.627383       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:52.629043       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:52.630864       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:52.630994       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:53.038246       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:53.116933       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:53.116957       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:53.219342       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:53.222402       1 metrics.go:61] Registering metrics
	I0916 10:48:53.222563       1 controller.go:374] Syncing nftables rules
	I0916 10:49:03.037971       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:03.038169       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	I0916 10:48:31.492826       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:31.492896       1 main.go:299] handling current node
	
	
	==> kube-apiserver [12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5] <==
	I0916 10:48:51.507247       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:51.509566       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:51.514923       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:51.514963       1 policy_source.go:224] refreshing policies
	I0916 10:48:51.517563       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:48:51.517673       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:51.517813       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:51.520522       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:51.532063       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:51.532210       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:51.532286       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:51.532417       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:51.534946       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:51.535625       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:51.536172       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:51.536265       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:51.536300       1 cache.go:39] Caches are synced for autoregister controller
	E0916 10:48:51.552940       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:52.288516       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:53.633170       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:53.763564       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:53.775869       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:53.843592       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:53.851287       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:49:10.096044       1 controller.go:615] quota admission added evaluator for: endpoints
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:33.396647       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [d69895ce029aea3aacc9c117ed64c274077ed21cefa739082ee00be46e903809] <==
	I0916 10:48:54.785999       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:54.794997       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:54.801438       1 shared_informer.go:320] Caches are synced for service account
	I0916 10:48:54.804758       1 shared_informer.go:320] Caches are synced for job
	I0916 10:48:54.804818       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:54.804841       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:54.804854       1 shared_informer.go:320] Caches are synced for certificate-csrapproving
	I0916 10:48:54.804908       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:54.804944       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:54.804977       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:54.805173       1 shared_informer.go:320] Caches are synced for ClusterRoleAggregator
	I0916 10:48:54.807707       1 shared_informer.go:320] Caches are synced for disruption
	I0916 10:48:54.808757       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:54.812488       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:54.838364       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:54.838534       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="97.917µs"
	I0916 10:48:54.855717       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 10:48:54.857096       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:54.955113       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:55.012167       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.012318       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 10:48:55.042247       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.444362       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465215       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db] <==
	I0916 10:48:52.781215       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:52.872969       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:52.873137       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:52.892040       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:52.892101       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:52.893967       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:52.894261       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:52.894296       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:52.896485       1 config.go:199] "Starting service config controller"
	I0916 10:48:52.896530       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:52.898078       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:52.898096       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:52.899995       1 config.go:328] "Starting node config controller"
	I0916 10:48:52.900022       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:52.998445       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:52.998473       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:53.000890       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7fcb94c0bce841ce6b01965b0d7eaeedcf47449b34b9a524c16d4f0580db9e76] <==
	I0916 10:48:50.310915       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:51.321329       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:51.321447       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:51.321484       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:51.321527       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:51.482161       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:51.488747       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:51.491214       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:51.491627       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:51.497428       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:51.491653       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:51.597727       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:35.945051       1 tlsconfig.go:258] "Shutting down DynamicServingCertificateController"
	I0916 10:48:35.945270       1 secure_serving.go:258] Stopped listening on 127.0.0.1:10259
	E0916 10:48:35.945400       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:51 functional-919910 kubelet[4906]: I0916 10:48:51.549865    4906 kubelet_node_status.go:75] "Successfully registered node" node="functional-919910"
	Sep 16 10:48:51 functional-919910 kubelet[4906]: I0916 10:48:51.549896    4906 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 10:48:51 functional-919910 kubelet[4906]: I0916 10:48:51.550603    4906 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.021040    4906 apiserver.go:52] "Watching apiserver"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.025181    4906 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-919910" podUID="82da7bbe-1484-402c-b1a5-7165f1938703"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.047875    4906 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-919910"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.053769    4906 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149165    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/1282e172-7d16-4f24-9f7d-33da705832a9-cni-cfg\") pod \"kindnet-nb5xl\" (UID: \"1282e172-7d16-4f24-9f7d-33da705832a9\") " pod="kube-system/kindnet-nb5xl"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149247    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/2e1bfc3e-dea3-4511-a154-e367e28b0898-xtables-lock\") pod \"kube-proxy-nvpzv\" (UID: \"2e1bfc3e-dea3-4511-a154-e367e28b0898\") " pod="kube-system/kube-proxy-nvpzv"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149269    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/2eb6523f-f61a-4c33-8e91-0bbbb874554b-tmp\") pod \"storage-provisioner\" (UID: \"2eb6523f-f61a-4c33-8e91-0bbbb874554b\") " pod="kube-system/storage-provisioner"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149303    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/2e1bfc3e-dea3-4511-a154-e367e28b0898-lib-modules\") pod \"kube-proxy-nvpzv\" (UID: \"2e1bfc3e-dea3-4511-a154-e367e28b0898\") " pod="kube-system/kube-proxy-nvpzv"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149347    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/1282e172-7d16-4f24-9f7d-33da705832a9-xtables-lock\") pod \"kindnet-nb5xl\" (UID: \"1282e172-7d16-4f24-9f7d-33da705832a9\") " pod="kube-system/kindnet-nb5xl"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.149366    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1282e172-7d16-4f24-9f7d-33da705832a9-lib-modules\") pod \"kindnet-nb5xl\" (UID: \"1282e172-7d16-4f24-9f7d-33da705832a9\") " pod="kube-system/kindnet-nb5xl"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.171182    4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-919910" podStartSLOduration=0.171161405 podStartE2EDuration="171.161405ms" podCreationTimestamp="2024-09-16 10:48:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:52.146208869 +0000 UTC m=+5.239965270" watchObservedRunningTime="2024-09-16 10:48:52.171161405 +0000 UTC m=+5.264917814"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.325207    4906 scope.go:117] "RemoveContainer" containerID="67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.326777    4906 scope.go:117] "RemoveContainer" containerID="e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.327073    4906 scope.go:117] "RemoveContainer" containerID="68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7"
	Sep 16 10:48:52 functional-919910 kubelet[4906]: I0916 10:48:52.327209    4906 scope.go:117] "RemoveContainer" containerID="2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7"
	Sep 16 10:48:53 functional-919910 kubelet[4906]: I0916 10:48:53.160186    4906 scope.go:117] "RemoveContainer" containerID="84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28"
	Sep 16 10:48:54 functional-919910 kubelet[4906]: I0916 10:48:54.309904    4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
	Sep 16 10:48:55 functional-919910 kubelet[4906]: I0916 10:48:55.088403    4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d8a6ba31c18f33c5660170029e5cde1" path="/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/volumes"
	Sep 16 10:48:57 functional-919910 kubelet[4906]: E0916 10:48:57.149495    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483737149192699,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:57 functional-919910 kubelet[4906]: E0916 10:48:57.149525    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483737149192699,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150843    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150885    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [0318f459801da15bd2e19f5a98b73c1156fff994dcdda61e57a57ddf9e92ccee] <==
	I0916 10:48:52.562259       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:52.664564       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:52.664725       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:10.099884       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:10.100345       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	I0916 10:49:10.101540       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"609", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9 became leader
	I0916 10:49:10.201234       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	

-- /stdout --
** stderr ** 
	E0916 10:49:09.995269 1406798 logs.go:258] failed to output last start logs: failed to read file /home/jenkins/minikube-integration/19651-1378450/.minikube/logs/lastStart.txt: bufio.Scanner: token too long

** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (605.056µs)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/ComponentHealth (2.49s)
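Note on the `bufio.Scanner: token too long` error in the stderr block above: Go's bufio.Scanner caps tokens at 64 KiB by default (bufio.MaxScanTokenSize), and lastStart.txt evidently contains a longer line. A minimal sketch of the usual fix, raising the limit via Scanner.Buffer — the file name stands in for the logged path, and the 1 MiB cap is an illustrative choice, not minikube's actual code:

	package main

	import (
		"bufio"
		"fmt"
		"log"
		"os"
	)

	func main() {
		f, err := os.Open("lastStart.txt") // stands in for the logged path
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		sc := bufio.NewScanner(f)
		// Default limit is bufio.MaxScanTokenSize (64 KiB); a longer line
		// makes Scan stop with "token too long". Allow up to 1 MiB here.
		sc.Buffer(make([]byte, 0, 64*1024), 1024*1024)

		for sc.Scan() {
			fmt.Println(sc.Text())
		}
		if err := sc.Err(); err != nil {
			log.Fatal(err) // without the Buffer call this reports "token too long"
		}
	}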

TestFunctional/serial/InvalidService (0s)

=== RUN   TestFunctional/serial/InvalidService
functional_test.go:2321: (dbg) Run:  kubectl --context functional-919910 apply -f testdata/invalidsvc.yaml
functional_test.go:2321: (dbg) Non-zero exit: kubectl --context functional-919910 apply -f testdata/invalidsvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (561.438µs)
functional_test.go:2323: kubectl --context functional-919910 apply -f testdata/invalidsvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/InvalidService (0.00s)
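Note on `fork/exec /usr/local/bin/kubectl: exec format error` (here and in the failures above): this is the kernel refusing to run a binary built for a different architecture, which on this arm64 host most likely means an amd64 kubectl is on the PATH. A quick way to confirm, sketched with Go's debug/elf (equivalent to running `file /usr/local/bin/kubectl`):

	package main

	import (
		"debug/elf"
		"fmt"
		"log"
	)

	func main() {
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		// EM_AARCH64 is expected on this arm64 host; EM_X86_64 would
		// explain the "exec format error" in the test output.
		fmt.Println(f.Machine)
	}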

TestFunctional/parallel/DashboardCmd (6.95s)

=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd

=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:905: (dbg) daemon: [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-919910 --alsologtostderr -v=1]
functional_test.go:918: output didn't produce a URL
functional_test.go:910: (dbg) stopping [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-919910 --alsologtostderr -v=1] ...
functional_test.go:910: (dbg) [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-919910 --alsologtostderr -v=1] stdout:
functional_test.go:910: (dbg) [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-919910 --alsologtostderr -v=1] stderr:
I0916 10:51:02.330526 1411204 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:02.331864 1411204 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:02.331886 1411204 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:02.331893 1411204 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:02.332181 1411204 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:02.332489 1411204 mustload.go:65] Loading cluster: functional-919910
I0916 10:51:02.333028 1411204 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:02.333571 1411204 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:02.350821 1411204 host.go:66] Checking if "functional-919910" exists ...
I0916 10:51:02.351252 1411204 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0916 10:51:02.453322 1411204 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:02.441216936 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
I0916 10:51:02.453454 1411204 api_server.go:166] Checking apiserver status ...
I0916 10:51:02.453517 1411204 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I0916 10:51:02.453570 1411204 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:02.475551 1411204 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:02.577943 1411204 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/5061/cgroup
I0916 10:51:02.592646 1411204 api_server.go:182] apiserver freezer: "13:freezer:/docker/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/crio/crio-12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5"
I0916 10:51:02.592744 1411204 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/crio/crio-12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5/freezer.state
I0916 10:51:02.603824 1411204 api_server.go:204] freezer state: "THAWED"
I0916 10:51:02.603854 1411204 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
I0916 10:51:02.613986 1411204 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
ok
W0916 10:51:02.614029 1411204 out.go:270] * Enabling dashboard ...
* Enabling dashboard ...
I0916 10:51:02.614280 1411204 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:02.614301 1411204 addons.go:69] Setting dashboard=true in profile "functional-919910"
I0916 10:51:02.614329 1411204 addons.go:234] Setting addon dashboard=true in "functional-919910"
I0916 10:51:02.614378 1411204 host.go:66] Checking if "functional-919910" exists ...
I0916 10:51:02.614894 1411204 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:02.639991 1411204 out.go:177]   - Using image docker.io/kubernetesui/dashboard:v2.7.0
I0916 10:51:02.642935 1411204 out.go:177]   - Using image docker.io/kubernetesui/metrics-scraper:v1.0.8
I0916 10:51:02.645598 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-ns.yaml
I0916 10:51:02.645627 1411204 ssh_runner.go:362] scp dashboard/dashboard-ns.yaml --> /etc/kubernetes/addons/dashboard-ns.yaml (759 bytes)
I0916 10:51:02.645714 1411204 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:02.664834 1411204 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:02.810960 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrole.yaml
I0916 10:51:02.810984 1411204 ssh_runner.go:362] scp dashboard/dashboard-clusterrole.yaml --> /etc/kubernetes/addons/dashboard-clusterrole.yaml (1001 bytes)
I0916 10:51:02.830438 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml
I0916 10:51:02.830462 1411204 ssh_runner.go:362] scp dashboard/dashboard-clusterrolebinding.yaml --> /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml (1018 bytes)
I0916 10:51:02.851261 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-configmap.yaml
I0916 10:51:02.851284 1411204 ssh_runner.go:362] scp dashboard/dashboard-configmap.yaml --> /etc/kubernetes/addons/dashboard-configmap.yaml (837 bytes)
I0916 10:51:02.877769 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-dp.yaml
I0916 10:51:02.877823 1411204 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/dashboard-dp.yaml (4288 bytes)
I0916 10:51:02.897909 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-role.yaml
I0916 10:51:02.897933 1411204 ssh_runner.go:362] scp dashboard/dashboard-role.yaml --> /etc/kubernetes/addons/dashboard-role.yaml (1724 bytes)
I0916 10:51:02.920882 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-rolebinding.yaml
I0916 10:51:02.920904 1411204 ssh_runner.go:362] scp dashboard/dashboard-rolebinding.yaml --> /etc/kubernetes/addons/dashboard-rolebinding.yaml (1046 bytes)
I0916 10:51:02.942637 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-sa.yaml
I0916 10:51:02.942661 1411204 ssh_runner.go:362] scp dashboard/dashboard-sa.yaml --> /etc/kubernetes/addons/dashboard-sa.yaml (837 bytes)
I0916 10:51:02.971632 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-secret.yaml
I0916 10:51:02.971656 1411204 ssh_runner.go:362] scp dashboard/dashboard-secret.yaml --> /etc/kubernetes/addons/dashboard-secret.yaml (1389 bytes)
I0916 10:51:02.992484 1411204 addons.go:431] installing /etc/kubernetes/addons/dashboard-svc.yaml
I0916 10:51:02.992506 1411204 ssh_runner.go:362] scp dashboard/dashboard-svc.yaml --> /etc/kubernetes/addons/dashboard-svc.yaml (1294 bytes)
I0916 10:51:03.013348 1411204 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I0916 10:51:04.600983 1411204 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (1.587588526s)
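Note: this apply step never touches the host kubectl: the manifests are copied to /etc/kubernetes/addons inside the node and applied with the node-local binary at /var/lib/minikube/binaries/v1.31.1/kubectl, which is why it succeeds while the host /usr/local/bin/kubectl fails below. A quick way to confirm the manifests landed on the node (sketch, assuming the profile name from this run):

	minikube -p functional-919910 ssh -- ls -l /etc/kubernetes/addons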
I0916 10:51:04.606179 1411204 out.go:177] * Some dashboard features require the metrics-server addon. To enable all features please run:

	minikube -p functional-919910 addons enable metrics-server

I0916 10:51:04.611192 1411204 addons.go:197] Writing out "functional-919910" config to set dashboard=true...
W0916 10:51:04.611467 1411204 out.go:270] * Verifying dashboard health ...
* Verifying dashboard health ...
I0916 10:51:04.612397 1411204 kapi.go:59] client config for functional-919910: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil
), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
I0916 10:51:04.641755 1411204 service.go:214] Found service: &Service{ObjectMeta:{kubernetes-dashboard  kubernetes-dashboard  a4c045fa-2e96-4072-b7d3-314c05e218c6 752 0 2024-09-16 10:51:04 +0000 UTC <nil> <nil> map[addonmanager.kubernetes.io/mode:Reconcile k8s-app:kubernetes-dashboard kubernetes.io/minikube-addons:dashboard] map[kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"addonmanager.kubernetes.io/mode":"Reconcile","k8s-app":"kubernetes-dashboard","kubernetes.io/minikube-addons":"dashboard"},"name":"kubernetes-dashboard","namespace":"kubernetes-dashboard"},"spec":{"ports":[{"port":80,"targetPort":9090}],"selector":{"k8s-app":"kubernetes-dashboard"}}}
] [] [] [{kubectl-client-side-apply Update v1 2024-09-16 10:51:04 +0000 UTC FieldsV1 {"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{".":{},"f:addonmanager.kubernetes.io/mode":{},"f:k8s-app":{},"f:kubernetes.io/minikube-addons":{}}},"f:spec":{"f:internalTrafficPolicy":{},"f:ports":{".":{},"k:{\"port\":80,\"protocol\":\"TCP\"}":{".":{},"f:port":{},"f:protocol":{},"f:targetPort":{}}},"f:selector":{},"f:sessionAffinity":{},"f:type":{}}} }]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:,Protocol:TCP,Port:80,TargetPort:{0 9090 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: kubernetes-dashboard,},ClusterIP:10.99.2.180,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.99.2.180],IPFamilies:[IPv4],AllocateLoadBalancerNod
ePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}
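Note: the Service found above is a plain ClusterIP service (10.99.2.180) mapping port 80 to container port 9090. With the host kubectl unusable in this run, minikube's bundled kubectl would be one way to inspect it (sketch):

	minikube -p functional-919910 kubectl -- -n kubernetes-dashboard get svc kubernetes-dashboard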
W0916 10:51:04.641964 1411204 out.go:270] * Launching proxy ...
* Launching proxy ...
I0916 10:51:04.642078 1411204 dashboard.go:152] Executing: /usr/local/bin/kubectl [/usr/local/bin/kubectl --context functional-919910 proxy --port 36195]
I0916 10:51:04.647779 1411204 out.go:201] 
W0916 10:51:04.651563 1411204 out.go:270] X Exiting due to HOST_KUBECTL_PROXY: kubectl proxy: proxy start: fork/exec /usr/local/bin/kubectl: exec format error
X Exiting due to HOST_KUBECTL_PROXY: kubectl proxy: proxy start: fork/exec /usr/local/bin/kubectl: exec format error
W0916 10:51:04.651588 1411204 out.go:270] * 
* 
W0916 10:51:04.778435 1411204 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
│                                                                                             │
│    * If the above advice does not help, please let us know:                                 │
│      https://github.com/kubernetes/minikube/issues/new/choose                               │
│                                                                                             │
│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
│    * Please also attach the following file to the GitHub issue:                             │
│    * - /tmp/minikube_mount_2ff24de83b536a9cdf94e6dc890f569eda357db1_0.log                   │
│                                                                                             │
╰─────────────────────────────────────────────────────────────────────────────────────────────╯
I0916 10:51:04.781138 1411204 out.go:201] 
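Note: "exec format error" from fork/exec almost always means the binary's architecture does not match the host. This job runs on linux/arm64 (see the Last Start header below), so /usr/local/bin/kubectl is most likely an amd64 build. A minimal check with standard tools on the affected host:

	uname -m                       # aarch64 expected on this runner
	file /usr/local/bin/kubectl    # an x86-64 ELF here would confirm the mismatch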
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/DashboardCmd]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
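Note: when only one field is needed, docker inspect accepts a Go template instead of the full JSON above; the harness itself uses this form to resolve the SSH port (34613 per the NetworkSettings section):

	docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' functional-919910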
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/parallel/DashboardCmd FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/DashboardCmd]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (3.179146984s)
helpers_test.go:252: TestFunctional/parallel/DashboardCmd logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command  |                                  Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| service   | functional-919910 service list                                          | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | -o json                                                                 |                   |         |         |                     |                     |
	| service   | functional-919910 service                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --namespace=default --https                                             |                   |         |         |                     |                     |
	|           | --url hello-node                                                        |                   |         |         |                     |                     |
	| service   | functional-919910                                                       | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | service hello-node --url                                                |                   |         |         |                     |                     |
	|           | --format={{.IP}}                                                        |                   |         |         |                     |                     |
	| service   | functional-919910 service                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | hello-node --url                                                        |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdany-port2927168651/001:/mount-9p     |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh -- ls                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh cat                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | /mount-9p/test-1726483858215687711                                      |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh mount |                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | grep 9p; ls -la /mount-9p; cat                                          |                   |         |         |                     |                     |
	|           | /mount-9p/pod-dates                                                     |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdspecific-port740621385/001:/mount-9p |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1 --port 46464                                     |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --alsologtostderr                                             |                   |         |         |                     |                     |
	|           | -v=1 --driver=docker                                                    |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| dashboard | --url --port 36195                                                      | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -p functional-919910                                                    |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh -- ls                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount2  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount1  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount3  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:02
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:02.087879 1411119 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:02.088043 1411119 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:02.088070 1411119 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:02.088090 1411119 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:02.088355 1411119 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:02.088819 1411119 out.go:352] Setting JSON to false
	I0916 10:51:02.089772 1411119 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38007,"bootTime":1726445855,"procs":177,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:02.089847 1411119 start.go:139] virtualization:  
	I0916 10:51:02.092919 1411119 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:02.096358 1411119 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:02.096483 1411119 notify.go:220] Checking for updates...
	I0916 10:51:02.101643 1411119 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:02.104448 1411119 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:02.108493 1411119 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:02.113551 1411119 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:02.116163 1411119 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:02.119313 1411119 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:02.120041 1411119 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:02.148042 1411119 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:02.148169 1411119 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:02.215881 1411119 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:02.206322665 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:02.215998 1411119 docker.go:318] overlay module found
	I0916 10:51:02.218840 1411119 out.go:177] * Using the docker driver based on existing profile
	I0916 10:51:02.221572 1411119 start.go:297] selected driver: docker
	I0916 10:51:02.221593 1411119 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP
: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:02.221708 1411119 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:02.221816 1411119 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:02.274109 1411119 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:02.264208469 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:02.274582 1411119 cni.go:84] Creating CNI manager for ""
	I0916 10:51:02.274637 1411119 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:51:02.274690 1411119 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker Bin
aryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:02.279287 1411119 out.go:177] * dry-run validation complete!
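	Note: this "Last Start" is one of the --dry-run starts recorded in the Audit table above; it only validates the driver and cluster config without mutating the cluster, e.g.:
	
	  minikube start -p functional-919910 --dry-run --memory 250MB --alsologtostderr --driver=docker --container-runtime=crio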
	
	
	==> CRI-O <==
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.256366927Z" level=info msg="Stopped pod sandbox (already stopped): 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=383cd39c-1f42-454d-bd04-0ba447bad713 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.257128860Z" level=info msg="Removing pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.264653739Z" level=info msg="Removed pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.531002982Z" level=info msg="Running pod sandbox: kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57/POD" id=aa9175e7-f6e7-4e1c-8b89-d3d323f98b0f name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.531067448Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.560475873Z" level=info msg="Running pod sandbox: kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24/POD" id=5de970f6-f1bf-4d2c-8a1d-8025da975286 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.560542587Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.564001379Z" level=info msg="Got pod network &{Name:kubernetes-dashboard-695b96c756-tfx57 Namespace:kubernetes-dashboard ID:922d838b02c1c0aa85d9e15c00fc4e58fcd00921ff5664f69405eaa786ef16e7 UID:067210bf-0875-4a9d-85ee-79032a148043 NetNS:/var/run/netns/9323874c-bd17-444e-bc2f-5d16fa833b2a Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.564037735Z" level=info msg="Adding pod kubernetes-dashboard_kubernetes-dashboard-695b96c756-tfx57 to CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.589049907Z" level=info msg="Got pod network &{Name:dashboard-metrics-scraper-c5db448b4-qqx24 Namespace:kubernetes-dashboard ID:e2a41fd0177c9e78cbe96ed596219767465b54a5c5464fee624d12b77e49a2cf UID:e413c9cc-49a4-456c-81e0-7f2e23692d08 NetNS:/var/run/netns/0b90dad9-5461-4e89-ac71-19d6127a6842 Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.589110180Z" level=info msg="Adding pod kubernetes-dashboard_dashboard-metrics-scraper-c5db448b4-qqx24 to CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.623714624Z" level=info msg="Got pod network &{Name:dashboard-metrics-scraper-c5db448b4-qqx24 Namespace:kubernetes-dashboard ID:e2a41fd0177c9e78cbe96ed596219767465b54a5c5464fee624d12b77e49a2cf UID:e413c9cc-49a4-456c-81e0-7f2e23692d08 NetNS:/var/run/netns/0b90dad9-5461-4e89-ac71-19d6127a6842 Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.623887280Z" level=info msg="Checking pod kubernetes-dashboard_dashboard-metrics-scraper-c5db448b4-qqx24 for CNI network kindnet (type=ptp)"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.630101514Z" level=info msg="Ran pod sandbox e2a41fd0177c9e78cbe96ed596219767465b54a5c5464fee624d12b77e49a2cf with infra container: kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24/POD" id=5de970f6-f1bf-4d2c-8a1d-8025da975286 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.631566749Z" level=info msg="Checking image status: docker.io/kubernetesui/metrics-scraper:v1.0.8@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c" id=ea79bc2b-4f2e-4010-84b0-703223634f0a name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.631861650Z" level=info msg="Image docker.io/kubernetesui/metrics-scraper:v1.0.8@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c not found" id=ea79bc2b-4f2e-4010-84b0-703223634f0a name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.633128128Z" level=info msg="Pulling image: docker.io/kubernetesui/metrics-scraper:v1.0.8@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c" id=b8983bb3-9f46-487e-b956-48074a39c1da name=/runtime.v1.ImageService/PullImage
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.635381716Z" level=info msg="Trying to access \"docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c\""
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.650794829Z" level=info msg="Got pod network &{Name:kubernetes-dashboard-695b96c756-tfx57 Namespace:kubernetes-dashboard ID:922d838b02c1c0aa85d9e15c00fc4e58fcd00921ff5664f69405eaa786ef16e7 UID:067210bf-0875-4a9d-85ee-79032a148043 NetNS:/var/run/netns/9323874c-bd17-444e-bc2f-5d16fa833b2a Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.650958345Z" level=info msg="Checking pod kubernetes-dashboard_kubernetes-dashboard-695b96c756-tfx57 for CNI network kindnet (type=ptp)"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.657861226Z" level=info msg="Ran pod sandbox 922d838b02c1c0aa85d9e15c00fc4e58fcd00921ff5664f69405eaa786ef16e7 with infra container: kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57/POD" id=aa9175e7-f6e7-4e1c-8b89-d3d323f98b0f name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.659582856Z" level=info msg="Checking image status: docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93" id=5732b229-26fe-4426-8a68-ccd07bb0ed20 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.659843017Z" level=info msg="Image docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93 not found" id=5732b229-26fe-4426-8a68-ccd07bb0ed20 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.920886924Z" level=info msg="Trying to access \"docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c\""
	Sep 16 10:51:05 functional-919910 crio[4619]: time="2024-09-16 10:51:05.274524642Z" level=info msg="Image operating system mismatch: image uses OS \"linux\"+architecture \"amd64\", expecting one of \"linux+arm64\""
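	Note: the final CRI-O line is the same architecture problem surfacing on the image side: the pinned metrics-scraper digest resolves to a linux/amd64 image while this node expects linux/arm64. To see which platforms a tag actually publishes (sketch; needs a reasonably recent docker CLI):
	
	  docker manifest inspect docker.io/kubernetesui/metrics-scraper:v1.0.8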
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	6f8c0a2f9d3e9       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   2 minutes ago       Running             kube-proxy                2                   46672cf6a1a3c       kube-proxy-nvpzv
	072cecfbf1d39       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   2 minutes ago       Running             kindnet-cni               2                   306886331d6ee       kindnet-nb5xl
	4deb5cc6dce54       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   2 minutes ago       Running             coredns                   2                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	0318f459801da       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   2 minutes ago       Running             storage-provisioner       2                   e27809ba10603       storage-provisioner
	12f0a29c7ca2a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   2 minutes ago       Running             kube-apiserver            0                   00a81472718e2       kube-apiserver-functional-919910
	7fcb94c0bce84       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   2 minutes ago       Running             kube-scheduler            2                   00455a328acb5       kube-scheduler-functional-919910
	d69895ce029ae       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   2 minutes ago       Running             kube-controller-manager   2                   0ffab32638624       kube-controller-manager-functional-919910
	0fb814efa9ee9       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   2 minutes ago       Running             etcd                      2                   46079181d2925       etcd-functional-919910
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   3 minutes ago       Exited              storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   3 minutes ago       Exited              kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   3 minutes ago       Exited              kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   3 minutes ago       Exited              coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   3 minutes ago       Exited              kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   3 minutes ago       Exited              etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   3 minutes ago       Exited              kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:38507 - 51569 "HINFO IN 5479759435856645223.8976423270861566953. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013474675s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:51:04 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     4m1s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m7s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m1s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m15s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m6s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m1s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m6s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m
	  kubernetes-dashboard        dashboard-metrics-scraper-c5db448b4-qqx24    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3s
	  kubernetes-dashboard        kubernetes-dashboard-695b96c756-tfx57        0 (0%)        0 (0%)      0 (0%)           0 (0%)         3s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m59s                  kube-proxy       
	  Normal   Starting                 2m14s                  kube-proxy       
	  Normal   Starting                 3m                     kube-proxy       
	  Warning  CgroupV1                 4m6s                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m6s                   kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m6s                   kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m6s                   kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   Starting                 4m6s                   kubelet          Starting kubelet.
	  Normal   RegisteredNode           4m2s                   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                3m20s                  kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           2m59s                  node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeHasSufficientMemory  2m20s (x8 over 2m20s)  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 2m20s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 2m20s                  kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m20s (x8 over 2m20s)  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m20s (x7 over 2m20s)  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m13s                  node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [0fb814efa9ee90e98aaa699004b013bf5a6a31aa8325e33f52783fa123bcc384] <==
	{"level":"info","ts":"2024-09-16T10:48:47.877008Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:48:47.877076Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:47.877156Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.877188Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.899347Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:47.899570Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:47.899599Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:47.899714Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:47.899728Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:49.036711Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036850Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036905Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.036978Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037063Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.040899Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:49.041104Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.042103Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.043175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.043511Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.045434Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.046389Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.045527Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:49.050654Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:35.943802Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:35.943843Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:35.943911Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.943938Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945041Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945137Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:35.990678Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:35.995430Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995621Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995642Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:51:07 up 10:33,  0 users,  load average: 1.97, 1.45, 1.66
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1] <==
	I0916 10:49:03.038169       1 main.go:299] handling current node
	I0916 10:49:13.040898       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:13.041015       1 main.go:299] handling current node
	I0916 10:49:23.042091       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:23.042126       1 main.go:299] handling current node
	I0916 10:49:33.044840       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:33.044878       1 main.go:299] handling current node
	I0916 10:49:43.040749       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:43.040855       1 main.go:299] handling current node
	I0916 10:49:53.038520       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:53.038552       1 main.go:299] handling current node
	I0916 10:50:03.038499       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:03.038621       1 main.go:299] handling current node
	I0916 10:50:13.047462       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:13.047501       1 main.go:299] handling current node
	I0916 10:50:23.044819       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:23.044854       1 main.go:299] handling current node
	I0916 10:50:33.037721       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:33.037760       1 main.go:299] handling current node
	I0916 10:50:43.043131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:43.043172       1 main.go:299] handling current node
	I0916 10:50:53.038035       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:53.038069       1 main.go:299] handling current node
	I0916 10:51:03.037909       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:51:03.037958       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	I0916 10:48:31.492826       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:31.492896       1 main.go:299] handling current node
	
	
	==> kube-apiserver [12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5] <==
	I0916 10:48:51.517673       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:51.517813       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:51.520522       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:51.532063       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:51.532210       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:51.532286       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:51.532417       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:51.534946       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:51.535625       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:51.536172       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:51.536265       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:51.536300       1 cache.go:39] Caches are synced for autoregister controller
	E0916 10:48:51.552940       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:52.288516       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:53.633170       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:53.763564       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:53.775869       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:53.843592       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:53.851287       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:49:10.096044       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:03.868011       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:51:03.979311       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:04.465450       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/kubernetes-dashboard" clusterIPs={"IPv4":"10.99.2.180"}
	I0916 10:51:04.508474       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:04.592851       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/dashboard-metrics-scraper" clusterIPs={"IPv4":"10.97.225.105"}
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:33.396647       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [d69895ce029aea3aacc9c117ed64c274077ed21cefa739082ee00be46e903809] <==
	I0916 10:48:55.444362       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465215       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:51:04.115554       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="128.900629ms"
	E0916 10:51:04.115599       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.115667       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="66.936963ms"
	E0916 10:51:04.115680       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.161490       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="40.899792ms"
	E0916 10:51:04.161538       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.165981       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="46.213986ms"
	E0916 10:51:04.166028       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.182575       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="14.905721ms"
	E0916 10:51:04.182622       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.182674       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="17.528022ms"
	E0916 10:51:04.182697       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.240501       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="56.487547ms"
	I0916 10:51:04.275668       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="91.695454ms"
	I0916 10:51:04.293566       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="50.22195ms"
	I0916 10:51:04.297862       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="48.524µs"
	I0916 10:51:04.299265       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="34.904µs"
	I0916 10:51:04.320393       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="44.594287ms"
	I0916 10:51:04.323280       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="87.555µs"
	I0916 10:51:04.382448       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="54.603µs"
	I0916 10:51:07.501824       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="38.929076ms"
	I0916 10:51:07.502016       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="74.197µs"
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db] <==
	I0916 10:48:52.781215       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:52.872969       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:52.873137       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:52.892040       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:52.892101       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:52.893967       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:52.894261       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:52.894296       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:52.896485       1 config.go:199] "Starting service config controller"
	I0916 10:48:52.896530       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:52.898078       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:52.898096       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:52.899995       1 config.go:328] "Starting node config controller"
	I0916 10:48:52.900022       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:52.998445       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:52.998473       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:53.000890       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7fcb94c0bce841ce6b01965b0d7eaeedcf47449b34b9a524c16d4f0580db9e76] <==
	I0916 10:48:50.310915       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:51.321329       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:51.321447       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:51.321484       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:51.321527       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:51.482161       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:51.488747       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:51.491214       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:51.491627       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:51.497428       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:51.491653       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:51.597727       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:35.945051       1 tlsconfig.go:258] "Shutting down DynamicServingCertificateController"
	I0916 10:48:35.945270       1 secure_serving.go:258] Stopped listening on 127.0.0.1:10259
	E0916 10:48:35.945400       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158435    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158481    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159871    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159920    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161814    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161855    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163614    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163651    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164729    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164762    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166381    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166973    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168441    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168483    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: E0916 10:51:04.228929    4906 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.228999    4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: E0916 10:51:04.258970    4906 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.259033    4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.294906    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4tsh\" (UniqueName: \"kubernetes.io/projected/067210bf-0875-4a9d-85ee-79032a148043-kube-api-access-g4tsh\") pod \"kubernetes-dashboard-695b96c756-tfx57\" (UID: \"067210bf-0875-4a9d-85ee-79032a148043\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.294960    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/067210bf-0875-4a9d-85ee-79032a148043-tmp-volume\") pod \"kubernetes-dashboard-695b96c756-tfx57\" (UID: \"067210bf-0875-4a9d-85ee-79032a148043\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.395817    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcgw\" (UniqueName: \"kubernetes.io/projected/e413c9cc-49a4-456c-81e0-7f2e23692d08-kube-api-access-gdcgw\") pod \"dashboard-metrics-scraper-c5db448b4-qqx24\" (UID: \"e413c9cc-49a4-456c-81e0-7f2e23692d08\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.395884    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/e413c9cc-49a4-456c-81e0-7f2e23692d08-tmp-volume\") pod \"dashboard-metrics-scraper-c5db448b4-qqx24\" (UID: \"e413c9cc-49a4-456c-81e0-7f2e23692d08\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.433755    4906 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:51:07 functional-919910 kubelet[4906]: E0916 10:51:07.171520    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483867171044384,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:165956,},InodesUsed:&UInt64Value{Value:83,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:51:07 functional-919910 kubelet[4906]: E0916 10:51:07.171561    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483867171044384,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:165956,},InodesUsed:&UInt64Value{Value:83,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [0318f459801da15bd2e19f5a98b73c1156fff994dcdda61e57a57ddf9e92ccee] <==
	I0916 10:48:52.562259       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:52.664564       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:52.664725       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:10.099884       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:10.100345       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	I0916 10:49:10.101540       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"609", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9 became leader
	I0916 10:49:10.201234       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	

-- /stdout --
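The two storage-provisioner blocks above record a leader handoff: each replica initializes, tries to acquire the kube-system/k8s.io-minikube-hostpath lease, and starts its provisioner controller only once it holds the lease. A minimal sketch of that client-go pattern (hypothetical code, not minikube's storage-provisioner source; it uses the newer Lease lock where the logs above show an Endpoints-based lock, and it assumes in-cluster credentials):

// Sketch of client-go leader election behind the "attempting to acquire
// leader lease" / "successfully acquired lease" lines above.
package main

import (
	"context"
	"log"
	"os"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/leaderelection"
	"k8s.io/client-go/tools/leaderelection/resourcelock"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes the process runs in a pod
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	id, _ := os.Hostname() // pod name doubles as the candidate identity

	lock := &resourcelock.LeaseLock{
		LeaseMeta:  metav1.ObjectMeta{Namespace: "kube-system", Name: "k8s.io-minikube-hostpath"},
		Client:     client.CoordinationV1(),
		LockConfig: resourcelock.ResourceLockConfig{Identity: id},
	}

	leaderelection.RunOrDie(context.Background(), leaderelection.LeaderElectionConfig{
		Lock:            lock,
		LeaseDuration:   15 * time.Second, // how long a held lease stays valid
		RenewDeadline:   10 * time.Second,
		RetryPeriod:     2 * time.Second,
		ReleaseOnCancel: true,
		Callbacks: leaderelection.LeaderCallbacks{
			OnStartedLeading: func(ctx context.Context) {
				log.Println("acquired lease; starting provisioner controller")
				<-ctx.Done() // real controller work would run here
			},
			OnStoppedLeading: func() { log.Println("lost lease; shutting down") },
		},
	})
}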
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (2.507308ms)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/DashboardCmd (6.95s)
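All of the kubectl failures in this report share the same "fork/exec /usr/local/bin/kubectl: exec format error". The kernel reports exec format error when handed a binary it cannot run, most often a CPU-architecture mismatch (for example, an amd64 kubectl on this arm64 runner). A minimal diagnostic sketch, assuming the file at the failing path is an ELF executable:

// Print the architecture an ELF binary was built for next to the host's,
// to check whether "exec format error" is an architecture mismatch.
package main

import (
	"debug/elf"
	"fmt"
	"log"
	"runtime"
)

func main() {
	f, err := elf.Open("/usr/local/bin/kubectl") // path from the failing fork/exec
	if err != nil {
		log.Fatal(err) // a non-ELF file would also explain the error
	}
	defer f.Close()
	// On an arm64 host, anything other than EM_AARCH64 here cannot execute.
	fmt.Printf("binary: %v, host: %s\n", f.Machine, runtime.GOARCH)
}

Run on the host, output such as "binary: EM_X86_64, host: arm64" would confirm the mismatch, while EM_AARCH64 would rule it out.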

x
+
TestFunctional/parallel/ServiceCmdConnect (2.46s)

=== RUN   TestFunctional/parallel/ServiceCmdConnect
=== PAUSE TestFunctional/parallel/ServiceCmdConnect

=== CONT  TestFunctional/parallel/ServiceCmdConnect
functional_test.go:1627: (dbg) Run:  kubectl --context functional-919910 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1627: (dbg) Non-zero exit: kubectl --context functional-919910 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8: fork/exec /usr/local/bin/kubectl: exec format error (590.861µs)
functional_test.go:1633: failed to create hello-node deployment with this command "kubectl --context functional-919910 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8": fork/exec /usr/local/bin/kubectl: exec format error.
functional_test.go:1598: service test failed - dumping debug information
functional_test.go:1599: -----------------------service failure post-mortem--------------------------------
functional_test.go:1602: (dbg) Run:  kubectl --context functional-919910 describe po hello-node-connect
functional_test.go:1602: (dbg) Non-zero exit: kubectl --context functional-919910 describe po hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (591.846µs)
functional_test.go:1604: "kubectl --context functional-919910 describe po hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1606: hello-node pod describe:
functional_test.go:1608: (dbg) Run:  kubectl --context functional-919910 logs -l app=hello-node-connect
functional_test.go:1608: (dbg) Non-zero exit: kubectl --context functional-919910 logs -l app=hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (524.86µs)
functional_test.go:1610: "kubectl --context functional-919910 logs -l app=hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1612: hello-node logs:
functional_test.go:1614: (dbg) Run:  kubectl --context functional-919910 describe svc hello-node-connect
functional_test.go:1614: (dbg) Non-zero exit: kubectl --context functional-919910 describe svc hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (629.842µs)
functional_test.go:1616: "kubectl --context functional-919910 describe svc hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1618: hello-node svc describe:
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/ServiceCmdConnect]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
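The Ports map in the inspect output above (for example 22/tcp mapped to 127.0.0.1:34613) is what the SSH provisioning steps later in this log depend on. A minimal sketch of the same lookup, assuming the docker CLI is on PATH; the inspect template is the one the cli_runner lines further down execute:

// Query the host port Docker mapped to the node container's SSH port.
package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	out, err := exec.Command("docker", "container", "inspect",
		"-f", `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`,
		"functional-919910").Output()
	if err != nil {
		log.Fatal(err)
	}
	// For the inspect output above this prints 34613.
	fmt.Println("ssh host port:", strings.TrimSpace(string(out)))
}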
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/parallel/ServiceCmdConnect FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/ServiceCmdConnect]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (1.73041502s)
helpers_test.go:252: TestFunctional/parallel/ServiceCmdConnect logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| cache   | functional-919910 cache reload                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-919910 ssh                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| kubectl | functional-919910 kubectl --                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --context functional-919910                                              |                   |         |         |                     |                     |
	|         | get pods                                                                 |                   |         |         |                     |                     |
	| start   | -p functional-919910                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:49 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	| config  | functional-919910 config unset                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-919910 config set                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus 2                                                                   |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-919910 config unset                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh echo                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | hello                                                                    |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910:/home/docker/cp-test.txt                               |                   |         |         |                     |                     |
	|         | /tmp/TestFunctionalparallelCpCmd2247858640/001/cp-test.txt               |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh cat                                                | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | /etc/hostname                                                            |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| addons  | functional-919910 addons list                                            | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	| addons  | functional-919910 addons list                                            | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|         | -o json                                                                  |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:33
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:33.910735 1405153 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:33.910918 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.910922 1405153 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:33.910927 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.911187 1405153 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:48:33.911553 1405153 out.go:352] Setting JSON to false
	I0916 10:48:33.912603 1405153 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37859,"bootTime":1726445855,"procs":174,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:48:33.912669 1405153 start.go:139] virtualization:  
	I0916 10:48:33.916414 1405153 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:33.921226 1405153 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:33.921310 1405153 notify.go:220] Checking for updates...
	I0916 10:48:33.924192 1405153 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:33.926482 1405153 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:48:33.928824 1405153 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:48:33.930290 1405153 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:33.932494 1405153 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:33.936794 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:33.936883 1405153 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:33.971073 1405153 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:33.971194 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.033300 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.023431603 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.033460 1405153 docker.go:318] overlay module found
	I0916 10:48:34.036356 1405153 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:34.038192 1405153 start.go:297] selected driver: docker
	I0916 10:48:34.038228 1405153 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.038379 1405153 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:34.038507 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.092789 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.082522334 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.093226 1405153 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:34.093254 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:34.093309 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:34.093355 1405153 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.098551 1405153 out.go:177] * Starting "functional-919910" primary control-plane node in "functional-919910" cluster
	I0916 10:48:34.100896 1405153 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:48:34.103035 1405153 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:34.104428 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:34.104490 1405153 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:48:34.104498 1405153 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:34.104519 1405153 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:34.104600 1405153 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:34.104608 1405153 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:48:34.104827 1405153 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/config.json ...
	W0916 10:48:34.124223 1405153 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:34.124234 1405153 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:34.124328 1405153 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:34.124345 1405153 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:34.124350 1405153 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:34.124357 1405153 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:34.124362 1405153 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:34.288917 1405153 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:34.288951 1405153 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:34.288982 1405153 start.go:360] acquireMachinesLock for functional-919910: {Name:mkddf275897a7528274aa0390d95d40845ffb1ab Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:34.289059 1405153 start.go:364] duration metric: took 54.522µs to acquireMachinesLock for "functional-919910"
	I0916 10:48:34.289080 1405153 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:34.289084 1405153 fix.go:54] fixHost starting: 
	I0916 10:48:34.289431 1405153 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:48:34.305622 1405153 fix.go:112] recreateIfNeeded on functional-919910: state=Running err=<nil>
	W0916 10:48:34.305643 1405153 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:34.310594 1405153 out.go:177] * Updating the running docker "functional-919910" container ...
	I0916 10:48:34.313486 1405153 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:34.313608 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.330698 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.331000 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.331007 1405153 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:34.472292 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.472306 1405153 ubuntu.go:169] provisioning hostname "functional-919910"
	I0916 10:48:34.472377 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.490933 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.491182 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.491193 1405153 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-919910 && echo "functional-919910" | sudo tee /etc/hostname
	I0916 10:48:34.642095 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.642170 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.661757 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.662006 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.662021 1405153 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-919910' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-919910/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-919910' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:34.801384 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:48:34.801403 1405153 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:48:34.801426 1405153 ubuntu.go:177] setting up certificates
	I0916 10:48:34.801435 1405153 provision.go:84] configureAuth start
	I0916 10:48:34.801501 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:34.820918 1405153 provision.go:143] copyHostCerts
	I0916 10:48:34.820978 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:48:34.820986 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:48:34.821065 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:48:34.821168 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:48:34.821172 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:48:34.821197 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:48:34.821249 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:48:34.821252 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:48:34.821274 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:48:34.821320 1405153 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.functional-919910 san=[127.0.0.1 192.168.49.2 functional-919910 localhost minikube]
	I0916 10:48:35.568371 1405153 provision.go:177] copyRemoteCerts
	I0916 10:48:35.568431 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:35.568472 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.587212 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:35.685877 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:48:35.711210 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:35.737883 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:35.763778 1405153 provision.go:87] duration metric: took 962.330031ms to configureAuth
	I0916 10:48:35.763796 1405153 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:35.763993 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:35.764136 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.780995 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:35.781225 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:35.781237 1405153 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:48:41.213530 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:48:41.213542 1405153 machine.go:96] duration metric: took 6.900045274s to provisionDockerMachine
	I0916 10:48:41.213553 1405153 start.go:293] postStartSetup for "functional-919910" (driver="docker")
	I0916 10:48:41.213563 1405153 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:41.213629 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:41.213668 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.239640 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.342947 1405153 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:41.346242 1405153 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:41.346267 1405153 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:41.346276 1405153 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:41.346282 1405153 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:41.346292 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:48:41.346355 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:48:41.346443 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:48:41.346519 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> hosts in /etc/test/nested/copy/1383833
	I0916 10:48:41.346565 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/1383833
	I0916 10:48:41.355661 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:41.381460 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts --> /etc/test/nested/copy/1383833/hosts (40 bytes)
	I0916 10:48:41.406699 1405153 start.go:296] duration metric: took 193.131275ms for postStartSetup
	I0916 10:48:41.406787 1405153 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:41.406826 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.424693 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.518715 1405153 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:41.524705 1405153 fix.go:56] duration metric: took 7.235611151s for fixHost
	I0916 10:48:41.524721 1405153 start.go:83] releasing machines lock for "functional-919910", held for 7.235654293s
	I0916 10:48:41.524821 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:41.541935 1405153 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:41.541984 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.542020 1405153 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:41.542086 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.561069 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.570434 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.656471 1405153 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:41.785895 1405153 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:48:41.929425 1405153 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:41.933963 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.943404 1405153 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:41.943491 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.953183 1405153 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
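The two find/-exec passes above disable CNI configs by renaming them with a .mk_disabled suffix rather than deleting them, so the step is reversible. Re-quoted for an interactive shell (the logged form relies on ssh_runner passing the globs through unquoted), the loopback pass is roughly:

	sudo find /etc/cni/net.d -maxdepth 1 -type f \
	  -name '*loopback.conf*' -not -name '*.mk_disabled' \
	  -exec sh -c 'mv "$1" "$1.mk_disabled"' _ {} \;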
	I0916 10:48:41.953198 1405153 start.go:495] detecting cgroup driver to use...
	I0916 10:48:41.953235 1405153 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:41.953293 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:48:41.966190 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:48:41.978010 1405153 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:41.978067 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:41.992752 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:42.006013 1405153 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:42.151230 1405153 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:42.299523 1405153 docker.go:233] disabling docker service ...
	I0916 10:48:42.299589 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:42.315054 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:42.329122 1405153 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:42.464786 1405153 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:42.586432 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
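For reference, the stop/disable/mask sequence above maps onto standard systemd semantics: disable only removes the enablement symlinks, while mask points the unit at /dev/null so nothing can start it, and the final is-active probe confirms the daemon is really down. A condensed, hand-runnable equivalent:

	sudo systemctl stop -f docker.socket docker.service
	sudo systemctl disable docker.socket
	sudo systemctl mask docker.service
	sudo systemctl is-active --quiet docker || echo "docker is inactive"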
	I0916 10:48:42.599418 1405153 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:42.615905 1405153 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:48:42.615978 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.625827 1405153 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:48:42.625890 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.636100 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.646444 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.656847 1405153 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:42.666724 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.676826 1405153 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.687635 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
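Taken together, the tee and sed edits above should leave the CRI-O drop-in looking roughly like the fragment below. This is a reconstruction from the logged commands, not a capture of the file; the [crio.image]/[crio.runtime] section placement is assumed from CRI-O's stock 02-crio.conf layout.

	[crio.image]
	pause_image = "registry.k8s.io/pause:3.10"

	[crio.runtime]
	cgroup_manager = "cgroupfs"
	conmon_cgroup = "pod"
	default_sysctls = [
	  "net.ipv4.ip_unprivileged_port_start=0",
	]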
	I0916 10:48:42.698381 1405153 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:42.707000 1405153 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:42.715679 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:42.847692 1405153 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:48:43.038937 1405153 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:48:43.039005 1405153 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:48:43.042800 1405153 start.go:563] Will wait 60s for crictl version
	I0916 10:48:43.042867 1405153 ssh_runner.go:195] Run: which crictl
	I0916 10:48:43.046213 1405153 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:43.092243 1405153 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
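Since /etc/crictl.yaml (written a few lines up) already pins runtime-endpoint to the CRI-O socket, the same version probe can be reproduced by hand; the explicit flag below is only needed on hosts without that file:

	sudo crictl --runtime-endpoint unix:///var/run/crio/crio.sock version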
	I0916 10:48:43.092320 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.132595 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.181201 1405153 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:48:43.184095 1405153 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:43.199863 1405153 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:43.206470 1405153 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:43.209330 1405153 kubeadm.go:883] updating cluster {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:43.209456 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:43.209542 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.254938 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.254950 1405153 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:48:43.255048 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.291546 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.291559 1405153 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:43.291565 1405153 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 crio true true} ...
	I0916 10:48:43.291676 1405153 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=functional-919910 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
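The doubled ExecStart= in the generated kubelet drop-in above is the standard systemd override idiom: an empty ExecStart= first clears the command inherited from the base kubelet.service, and the next line then replaces it outright. Without the empty assignment, systemd would reject a second ExecStart= for a normal service (only Type=oneshot units may carry several). In general form:

	[Service]
	# empty assignment clears ExecStart inherited from the base unit
	ExecStart=
	# replacement command; the path and flags here are placeholders
	ExecStart=/usr/bin/kubelet --config=/var/lib/kubelet/config.yaml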
	I0916 10:48:43.291761 1405153 ssh_runner.go:195] Run: crio config
	I0916 10:48:43.340850 1405153 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:43.340962 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:43.340975 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:43.340984 1405153 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:43.341006 1405153 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-919910 NodeName:functional-919910 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:43.341148 1405153 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "functional-919910"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
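To sanity-check a generated config like the one above outside of a test run, recent kubeadm releases ship a validator subcommand; that your kubeadm build carries it, and the exact file name (minikube copies the config to kubeadm.yaml.new below), are assumptions in this sketch:

	sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config validate \
	  --config /var/tmp/minikube/kubeadm.yaml.new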
	
	I0916 10:48:43.341215 1405153 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:43.350412 1405153 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:43.350476 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:43.361154 1405153 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (367 bytes)
	I0916 10:48:43.380083 1405153 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:43.398316 1405153 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2005 bytes)
	I0916 10:48:43.417671 1405153 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:43.421406 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:43.572628 1405153 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:43.587702 1405153 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910 for IP: 192.168.49.2
	I0916 10:48:43.587714 1405153 certs.go:194] generating shared ca certs ...
	I0916 10:48:43.587731 1405153 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:43.587872 1405153 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:48:43.587922 1405153 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:48:43.587928 1405153 certs.go:256] generating profile certs ...
	I0916 10:48:43.588013 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key
	I0916 10:48:43.588061 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key.debd5ef9
	I0916 10:48:43.588099 1405153 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key
	I0916 10:48:43.588211 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:48:43.588269 1405153 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:43.588278 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:43.588301 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:48:43.588323 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:43.588343 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:48:43.588383 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:43.589063 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:43.615227 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:48:43.640209 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:43.665409 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:48:43.690396 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:43.715720 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:43.741758 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:43.766512 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:43.790812 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:43.815736 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:48:43.840930 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:48:43.866622 1405153 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:43.885132 1405153 ssh_runner.go:195] Run: openssl version
	I0916 10:48:43.890673 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:43.900521 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904289 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904360 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.912159 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:43.921601 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:48:43.931422 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935145 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935204 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.942725 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:43.952136 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:48:43.962336 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966066 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966132 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.973393 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
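Each ls/openssl/ln triple above implements OpenSSL's hashed-directory lookup: a CA in /etc/ssl/certs is found via a symlink named <subject-hash>.0, where the hash is exactly what the logged openssl x509 -hash -noout call prints. Done by hand for the first cert:

	hash="$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)"
	sudo ln -fs /etc/ssl/certs/minikubeCA.pem "/etc/ssl/certs/${hash}.0"   # -> b5213941.0 here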
	I0916 10:48:43.983388 1405153 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:43.987432 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:43.994465 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:44.005212 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:44.014973 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:44.023352 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:44.030882 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
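The string of -checkend 86400 probes above are 24-hour expiry checks: openssl x509 -checkend N exits 0 only if the certificate is still valid N seconds from now, so a non-zero status here would flag a cert expiring within a day. For example:

	if openssl x509 -noout -checkend 86400 \
	     -in /var/lib/minikube/certs/apiserver-kubelet-client.crt; then
	  echo "valid for at least 24h"
	else
	  echo "expires within 24h (or already expired)"
	fi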
	I0916 10:48:44.038292 1405153 kubeadm.go:392] StartCluster: {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:44.038381 1405153 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:44.038447 1405153 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:44.078250 1405153 cri.go:89] found id: "67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be"
	I0916 10:48:44.078263 1405153 cri.go:89] found id: "e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18"
	I0916 10:48:44.078267 1405153 cri.go:89] found id: "68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7"
	I0916 10:48:44.078269 1405153 cri.go:89] found id: "2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7"
	I0916 10:48:44.078272 1405153 cri.go:89] found id: "84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28"
	I0916 10:48:44.078275 1405153 cri.go:89] found id: "8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715"
	I0916 10:48:44.078278 1405153 cri.go:89] found id: "5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb"
	I0916 10:48:44.078281 1405153 cri.go:89] found id: "9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98"
	I0916 10:48:44.078283 1405153 cri.go:89] found id: "89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	I0916 10:48:44.078289 1405153 cri.go:89] found id: "584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	I0916 10:48:44.078292 1405153 cri.go:89] found id: "9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	I0916 10:48:44.078294 1405153 cri.go:89] found id: "3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	I0916 10:48:44.078297 1405153 cri.go:89] found id: "6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49"
	I0916 10:48:44.078299 1405153 cri.go:89] found id: "19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced"
	I0916 10:48:44.078303 1405153 cri.go:89] found id: "b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093"
	I0916 10:48:44.078305 1405153 cri.go:89] found id: "790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75"
	I0916 10:48:44.078307 1405153 cri.go:89] found id: ""
	I0916 10:48:44.078373 1405153 ssh_runner.go:195] Run: sudo runc list -f json
	
	
	==> CRI-O <==
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546185920Z" level=info msg="Created container 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb: kube-system/coredns-7c65d6cfc9-qzn8c/coredns" id=1d08d23d-6b30-4ef9-9749-75656f2f22fb name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546884840Z" level=info msg="Starting container: 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb" id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.550182971Z" level=info msg="Started container" PID=5232 containerID=072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1 description=kube-system/kindnet-nb5xl/kindnet-cni id=2d47cfa2-1c51-4e3e-84c6-20565a25b42f name=/runtime.v1.RuntimeService/StartContainer sandboxID=306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.573383165Z" level=info msg="Started container" PID=5218 containerID=4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb description=kube-system/coredns-7c65d6cfc9-qzn8c/coredns id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer sandboxID=4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.605671989Z" level=info msg="Started container" PID=5240 containerID=6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db description=kube-system/kube-proxy-nvpzv/kube-proxy id=0568d2b4-f47d-4f64-b678-a50f76abf9da name=/runtime.v1.RuntimeService/StartContainer sandboxID=46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.088292803Z" level=info msg="Stopping pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.089356144Z" level=info msg="Stopped pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.162210356Z" level=info msg="Removing container: 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.186675643Z" level=info msg="Removed container 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28: kube-system/kube-apiserver-functional-919910/kube-apiserver" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.038404155Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042553703Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042594104Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042618596Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046006841Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046045847Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046064399Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049550759Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049587057Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049603935Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053090451Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053131229Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.256320060Z" level=info msg="Stopping pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=383cd39c-1f42-454d-bd04-0ba447bad713 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.256366927Z" level=info msg="Stopped pod sandbox (already stopped): 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=383cd39c-1f42-454d-bd04-0ba447bad713 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.257128860Z" level=info msg="Removing pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.264653739Z" level=info msg="Removed pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	6f8c0a2f9d3e9       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   About a minute ago   Running             kube-proxy                2                   46672cf6a1a3c       kube-proxy-nvpzv
	072cecfbf1d39       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   About a minute ago   Running             kindnet-cni               2                   306886331d6ee       kindnet-nb5xl
	4deb5cc6dce54       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   About a minute ago   Running             coredns                   2                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	0318f459801da       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   About a minute ago   Running             storage-provisioner       2                   e27809ba10603       storage-provisioner
	12f0a29c7ca2a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   2 minutes ago        Running             kube-apiserver            0                   00a81472718e2       kube-apiserver-functional-919910
	7fcb94c0bce84       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   2 minutes ago        Running             kube-scheduler            2                   00455a328acb5       kube-scheduler-functional-919910
	d69895ce029ae       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   2 minutes ago        Running             kube-controller-manager   2                   0ffab32638624       kube-controller-manager-functional-919910
	0fb814efa9ee9       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   2 minutes ago        Running             etcd                      2                   46079181d2925       etcd-functional-919910
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   2 minutes ago        Exited              storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   2 minutes ago        Exited              kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   2 minutes ago        Exited              kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   2 minutes ago        Exited              coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   2 minutes ago        Exited              kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   2 minutes ago        Exited              etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   2 minutes ago        Exited              kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:38507 - 51569 "HINFO IN 5479759435856645223.8976423270861566953. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013474675s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:50:43 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m45s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m51s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m45s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         119s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m50s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m45s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m50s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m44s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 3m43s                kube-proxy       
	  Normal   Starting                 118s                 kube-proxy       
	  Normal   Starting                 2m45s                kube-proxy       
	  Warning  CgroupV1                 3m50s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m50s                kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m50s                kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m50s                kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m50s                kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m46s                node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                3m4s                 kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           2m43s                node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeHasSufficientMemory  2m4s (x8 over 2m4s)  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 2m4s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 2m4s                 kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m4s (x8 over 2m4s)  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m4s (x7 over 2m4s)  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           117s                 node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [0fb814efa9ee90e98aaa699004b013bf5a6a31aa8325e33f52783fa123bcc384] <==
	{"level":"info","ts":"2024-09-16T10:48:47.877008Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:48:47.877076Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:47.877156Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.877188Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.899347Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:47.899570Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:47.899599Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:47.899714Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:47.899728Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:49.036711Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036850Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036905Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.036978Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037063Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.040899Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:49.041104Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.042103Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.043175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.043511Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.045434Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.046389Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.045527Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:49.050654Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:35.943802Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:35.943843Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:35.943911Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.943938Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945041Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945137Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:35.990678Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:35.995430Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995621Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995642Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:50:51 up 10:33,  0 users,  load average: 0.98, 1.24, 1.60
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1] <==
	I0916 10:48:53.219342       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:53.222402       1 metrics.go:61] Registering metrics
	I0916 10:48:53.222563       1 controller.go:374] Syncing nftables rules
	I0916 10:49:03.037971       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:03.038169       1 main.go:299] handling current node
	I0916 10:49:13.040898       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:13.041015       1 main.go:299] handling current node
	I0916 10:49:23.042091       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:23.042126       1 main.go:299] handling current node
	I0916 10:49:33.044840       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:33.044878       1 main.go:299] handling current node
	I0916 10:49:43.040749       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:43.040855       1 main.go:299] handling current node
	I0916 10:49:53.038520       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:53.038552       1 main.go:299] handling current node
	I0916 10:50:03.038499       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:03.038621       1 main.go:299] handling current node
	I0916 10:50:13.047462       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:13.047501       1 main.go:299] handling current node
	I0916 10:50:23.044819       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:23.044854       1 main.go:299] handling current node
	I0916 10:50:33.037721       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:33.037760       1 main.go:299] handling current node
	I0916 10:50:43.043131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:43.043172       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	I0916 10:48:31.492826       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:31.492896       1 main.go:299] handling current node
	
	
	==> kube-apiserver [12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5] <==
	I0916 10:48:51.507247       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:51.509566       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:51.514923       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:51.514963       1 policy_source.go:224] refreshing policies
	I0916 10:48:51.517563       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:48:51.517673       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:51.517813       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:51.520522       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:51.532063       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:51.532210       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:51.532286       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:51.532417       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:51.534946       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:51.535625       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:51.536172       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:51.536265       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:51.536300       1 cache.go:39] Caches are synced for autoregister controller
	E0916 10:48:51.552940       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:52.288516       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:53.633170       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:53.763564       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:53.775869       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:53.843592       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:53.851287       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:49:10.096044       1 controller.go:615] quota admission added evaluator for: endpoints
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:33.396647       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [d69895ce029aea3aacc9c117ed64c274077ed21cefa739082ee00be46e903809] <==
	I0916 10:48:54.785999       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:54.794997       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:54.801438       1 shared_informer.go:320] Caches are synced for service account
	I0916 10:48:54.804758       1 shared_informer.go:320] Caches are synced for job
	I0916 10:48:54.804818       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:54.804841       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:54.804854       1 shared_informer.go:320] Caches are synced for certificate-csrapproving
	I0916 10:48:54.804908       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:54.804944       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:54.804977       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:54.805173       1 shared_informer.go:320] Caches are synced for ClusterRoleAggregator
	I0916 10:48:54.807707       1 shared_informer.go:320] Caches are synced for disruption
	I0916 10:48:54.808757       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:54.812488       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:54.838364       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:54.838534       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="97.917µs"
	I0916 10:48:54.855717       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 10:48:54.857096       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:54.955113       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:55.012167       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.012318       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 10:48:55.042247       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.444362       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465215       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db] <==
	I0916 10:48:52.781215       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:52.872969       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:52.873137       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:52.892040       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:52.892101       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:52.893967       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:52.894261       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:52.894296       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:52.896485       1 config.go:199] "Starting service config controller"
	I0916 10:48:52.896530       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:52.898078       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:52.898096       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:52.899995       1 config.go:328] "Starting node config controller"
	I0916 10:48:52.900022       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:52.998445       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:52.998473       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:53.000890       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7fcb94c0bce841ce6b01965b0d7eaeedcf47449b34b9a524c16d4f0580db9e76] <==
	I0916 10:48:50.310915       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:51.321329       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:51.321447       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:51.321484       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:51.321527       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:51.482161       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:51.488747       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:51.491214       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:51.491627       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:51.497428       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:51.491653       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:51.597727       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:35.945051       1 tlsconfig.go:258] "Shutting down DynamicServingCertificateController"
	I0916 10:48:35.945270       1 secure_serving.go:258] Stopped listening on 127.0.0.1:10259
	E0916 10:48:35.945400       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:55 functional-919910 kubelet[4906]: I0916 10:48:55.088403    4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d8a6ba31c18f33c5660170029e5cde1" path="/var/lib/kubelet/pods/3d8a6ba31c18f33c5660170029e5cde1/volumes"
	Sep 16 10:48:57 functional-919910 kubelet[4906]: E0916 10:48:57.149495    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483737149192699,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:48:57 functional-919910 kubelet[4906]: E0916 10:48:57.149525    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483737149192699,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150843    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150885    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:17 functional-919910 kubelet[4906]: E0916 10:49:17.152819    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483757152278637,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:17 functional-919910 kubelet[4906]: E0916 10:49:17.152854    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483757152278637,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:27 functional-919910 kubelet[4906]: E0916 10:49:27.154325    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483767154114722,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:27 functional-919910 kubelet[4906]: E0916 10:49:27.154362    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483767154114722,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:37 functional-919910 kubelet[4906]: E0916 10:49:37.155548    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483777155245399,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:37 functional-919910 kubelet[4906]: E0916 10:49:37.155585    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483777155245399,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:47 functional-919910 kubelet[4906]: E0916 10:49:47.157036    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483787156852576,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:47 functional-919910 kubelet[4906]: E0916 10:49:47.157093    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483787156852576,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158435    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158481    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159871    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159920    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161814    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161855    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163614    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163651    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164729    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164762    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166381    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166973    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [0318f459801da15bd2e19f5a98b73c1156fff994dcdda61e57a57ddf9e92ccee] <==
	I0916 10:48:52.562259       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:52.664564       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:52.664725       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:10.099884       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:10.100345       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	I0916 10:49:10.101540       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"609", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9 became leader
	I0916 10:49:10.201234       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	

-- /stdout --
** stderr ** 
	E0916 10:50:50.504319 1408604 logs.go:258] failed to output last start logs: failed to read file /home/jenkins/minikube-integration/19651-1378450/.minikube/logs/lastStart.txt: bufio.Scanner: token too long

** /stderr **
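Note: the "bufio.Scanner: token too long" failure in the stderr above comes from Go's bufio.Scanner, which caps a single token at 64 KiB (bufio.MaxScanTokenSize) by default; lastStart.txt evidently contains a longer line. A minimal sketch of the usual fix, raising the limit with Scanner.Buffer (the file path and the 10 MiB cap are illustrative assumptions, not minikube's actual code):

	// readlog.go: read a log file whose lines can exceed bufio.Scanner's
	// default 64 KiB token limit, the cause of "token too long" above.
	package main

	import (
		"bufio"
		"fmt"
		"os"
	)

	func main() {
		f, err := os.Open("lastStart.txt") // illustrative path
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		defer f.Close()

		sc := bufio.NewScanner(f)
		// Grow the buffer: start at 64 KiB, allow single lines up to 10 MiB.
		sc.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)
		for sc.Scan() {
			fmt.Println(sc.Text())
		}
		if err := sc.Err(); err != nil {
			fmt.Fprintln(os.Stderr, "scan:", err)
		}
	}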
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (492.788µs)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/ServiceCmdConnect (2.46s)

x
+
TestFunctional/parallel/PersistentVolumeClaim (101.9s)

=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim

=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:344: "storage-provisioner" [2eb6523f-f61a-4c33-8e91-0bbbb874554b] Running
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: integration-test=storage-provisioner healthy within 6.003446772s
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (478.987µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (414.34µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (500.27µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (409.335µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (634.273µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (606.155µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (377.205µs)
E0916 10:49:34.320924 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (685.972µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (519.47µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (564.703µs)
E0916 10:50:15.282227 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (480.02µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-919910 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-919910 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (481.144µs)
functional_test_pvc_test.go:65: failed to check for storage class: fork/exec /usr/local/bin/kubectl: exec format error
functional_test_pvc_test.go:69: (dbg) Run:  kubectl --context functional-919910 apply -f testdata/storage-provisioner/pvc.yaml
functional_test_pvc_test.go:69: (dbg) Non-zero exit: kubectl --context functional-919910 apply -f testdata/storage-provisioner/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (453.379µs)
functional_test_pvc_test.go:71: kubectl apply pvc.yaml failed: args "kubectl --context functional-919910 apply -f testdata/storage-provisioner/pvc.yaml": fork/exec /usr/local/bin/kubectl: exec format error
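Note: every kubectl invocation above dies with "fork/exec /usr/local/bin/kubectl: exec format error", the kernel's ENOEXEC symptom for a binary built for a different CPU architecture; on this arm64 host that most plausibly means an amd64 kubectl landed at /usr/local/bin/kubectl. A minimal Go sketch for confirming such a mismatch by inspecting the binary's ELF header (the path and the arm64 expectation are assumptions taken from this report):

	// archcheck.go: report which CPU architecture an ELF binary targets,
	// to diagnose "exec format error" on mixed-architecture CI hosts.
	package main

	import (
		"debug/elf"
		"fmt"
		"os"
		"runtime"
	)

	func main() {
		path := "/usr/local/bin/kubectl" // path from the failures above
		f, err := elf.Open(path)
		if err != nil {
			fmt.Fprintln(os.Stderr, "open:", err)
			os.Exit(1)
		}
		defer f.Close()
		// An arm64 host runs EM_AARCH64 binaries; EM_X86_64 here would
		// explain the exec format error.
		fmt.Printf("%s targets %v; this host is %s\n", path, f.Machine, runtime.GOARCH)
	}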
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/PersistentVolumeClaim]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/parallel/PersistentVolumeClaim FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/PersistentVolumeClaim]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (2.147685242s)
helpers_test.go:252: TestFunctional/parallel/PersistentVolumeClaim logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| start   | -p functional-919910                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:49 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	| config  | functional-919910 config unset                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-919910 config set                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus 2                                                                   |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-919910 config unset                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-919910 config get                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh echo                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | hello                                                                    |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910:/home/docker/cp-test.txt                               |                   |         |         |                     |                     |
	|         | /tmp/TestFunctionalparallelCpCmd2247858640/001/cp-test.txt               |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh cat                                                | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | /etc/hostname                                                            |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| cp      | functional-919910 cp                                                     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| ssh     | functional-919910 ssh -n                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-919910 sudo cat                                               |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| tunnel  | functional-919910 tunnel                                                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| addons  | functional-919910 addons list                                            | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	| addons  | functional-919910 addons list                                            | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|         | -o json                                                                  |                   |         |         |                     |                     |
	| service | functional-919910 service list                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	| service | functional-919910 service list                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|         | -o json                                                                  |                   |         |         |                     |                     |
	| service | functional-919910 service                                                | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|         | --namespace=default --https                                              |                   |         |         |                     |                     |
	|         | --url hello-node                                                         |                   |         |         |                     |                     |
	| service | functional-919910                                                        | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|         | service hello-node --url                                                 |                   |         |         |                     |                     |
	|         | --format={{.IP}}                                                         |                   |         |         |                     |                     |
	| service | functional-919910 service                                                | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|         | hello-node --url                                                         |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:33
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:33.910735 1405153 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:33.910918 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.910922 1405153 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:33.910927 1405153 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:33.911187 1405153 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:48:33.911553 1405153 out.go:352] Setting JSON to false
	I0916 10:48:33.912603 1405153 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37859,"bootTime":1726445855,"procs":174,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:48:33.912669 1405153 start.go:139] virtualization:  
	I0916 10:48:33.916414 1405153 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:33.921226 1405153 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:33.921310 1405153 notify.go:220] Checking for updates...
	I0916 10:48:33.924192 1405153 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:33.926482 1405153 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:48:33.928824 1405153 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:48:33.930290 1405153 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:33.932494 1405153 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:33.936794 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:33.936883 1405153 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:33.971073 1405153 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:33.971194 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.033300 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.023431603 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.033460 1405153 docker.go:318] overlay module found
	I0916 10:48:34.036356 1405153 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:34.038192 1405153 start.go:297] selected driver: docker
	I0916 10:48:34.038228 1405153 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.038379 1405153 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:34.038507 1405153 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:34.092789 1405153 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:48:34.082522334 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:34.093226 1405153 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:34.093254 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:34.093309 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:34.093355 1405153 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:34.098551 1405153 out.go:177] * Starting "functional-919910" primary control-plane node in "functional-919910" cluster
	I0916 10:48:34.100896 1405153 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:48:34.103035 1405153 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:34.104428 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:34.104490 1405153 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:48:34.104498 1405153 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:34.104519 1405153 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:34.104600 1405153 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:34.104608 1405153 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:48:34.104827 1405153 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/config.json ...
	W0916 10:48:34.124223 1405153 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:34.124234 1405153 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:34.124328 1405153 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:34.124345 1405153 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:34.124350 1405153 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:34.124357 1405153 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:34.124362 1405153 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:34.288917 1405153 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:34.288951 1405153 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:34.288982 1405153 start.go:360] acquireMachinesLock for functional-919910: {Name:mkddf275897a7528274aa0390d95d40845ffb1ab Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:34.289059 1405153 start.go:364] duration metric: took 54.522µs to acquireMachinesLock for "functional-919910"
	I0916 10:48:34.289080 1405153 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:34.289084 1405153 fix.go:54] fixHost starting: 
	I0916 10:48:34.289431 1405153 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
	I0916 10:48:34.305622 1405153 fix.go:112] recreateIfNeeded on functional-919910: state=Running err=<nil>
	W0916 10:48:34.305643 1405153 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:34.310594 1405153 out.go:177] * Updating the running docker "functional-919910" container ...
	I0916 10:48:34.313486 1405153 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:34.313608 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.330698 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.331000 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.331007 1405153 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:34.472292 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.472306 1405153 ubuntu.go:169] provisioning hostname "functional-919910"
	I0916 10:48:34.472377 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.490933 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.491182 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.491193 1405153 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-919910 && echo "functional-919910" | sudo tee /etc/hostname
	I0916 10:48:34.642095 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-919910
	
	I0916 10:48:34.642170 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:34.661757 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:34.662006 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:34.662021 1405153 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-919910' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-919910/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-919910' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:34.801384 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
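
For readers tracing the provisioning steps: the shell fragment a few lines above is minikube's idempotent /etc/hosts update. If no entry already ends in the machine hostname, it rewrites an existing 127.0.1.1 line in place, otherwise it appends one, so repeated provisioning runs never duplicate the mapping. A rough standalone equivalent in Go (a sketch for illustration only, not minikube's actual implementation; the function name and file handling are assumptions):

    package main

    import (
    	"fmt"
    	"os"
    	"regexp"
    	"strings"
    )

    // ensureHostsEntry mirrors the shell logic above: skip if some line
    // already ends in the hostname, rewrite an existing 127.0.1.1 line if
    // present, and append a new mapping otherwise.
    func ensureHostsEntry(path, hostname string) error {
    	data, err := os.ReadFile(path)
    	if err != nil {
    		return err
    	}
    	hasName := regexp.MustCompile(`\s` + regexp.QuoteMeta(hostname) + `$`)
    	lines := strings.Split(strings.TrimRight(string(data), "\n"), "\n")
    	for _, l := range lines {
    		if hasName.MatchString(l) {
    			return nil // same check as: grep -xq '.*\s<hostname>' /etc/hosts
    		}
    	}
    	replaced := false
    	for i, l := range lines {
    		if strings.HasPrefix(l, "127.0.1.1") {
    			lines[i] = "127.0.1.1 " + hostname // the sed -i branch
    			replaced = true
    			break
    		}
    	}
    	if !replaced {
    		lines = append(lines, "127.0.1.1 "+hostname) // the tee -a branch
    	}
    	return os.WriteFile(path, []byte(strings.Join(lines, "\n")+"\n"), 0644)
    }

    func main() {
    	if err := ensureHostsEntry("/etc/hosts", "functional-919910"); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    }
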
	I0916 10:48:34.801403 1405153 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:48:34.801426 1405153 ubuntu.go:177] setting up certificates
	I0916 10:48:34.801435 1405153 provision.go:84] configureAuth start
	I0916 10:48:34.801501 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:34.820918 1405153 provision.go:143] copyHostCerts
	I0916 10:48:34.820978 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:48:34.820986 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:48:34.821065 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:48:34.821168 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:48:34.821172 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:48:34.821197 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:48:34.821249 1405153 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:48:34.821252 1405153 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:48:34.821274 1405153 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:48:34.821320 1405153 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.functional-919910 san=[127.0.0.1 192.168.49.2 functional-919910 localhost minikube]
	I0916 10:48:35.568371 1405153 provision.go:177] copyRemoteCerts
	I0916 10:48:35.568431 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:35.568472 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.587212 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:35.685877 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:48:35.711210 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:35.737883 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:35.763778 1405153 provision.go:87] duration metric: took 962.330031ms to configureAuth
	I0916 10:48:35.763796 1405153 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:35.763993 1405153 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:48:35.764136 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:35.780995 1405153 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:35.781225 1405153 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34613 <nil> <nil>}
	I0916 10:48:35.781237 1405153 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:48:41.213530 1405153 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:48:41.213542 1405153 machine.go:96] duration metric: took 6.900045274s to provisionDockerMachine
	I0916 10:48:41.213553 1405153 start.go:293] postStartSetup for "functional-919910" (driver="docker")
	I0916 10:48:41.213563 1405153 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:41.213629 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:41.213668 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.239640 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.342947 1405153 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:41.346242 1405153 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:41.346267 1405153 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:41.346276 1405153 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:41.346282 1405153 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:41.346292 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:48:41.346355 1405153 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:48:41.346443 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:48:41.346519 1405153 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts -> hosts in /etc/test/nested/copy/1383833
	I0916 10:48:41.346565 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/1383833
	I0916 10:48:41.355661 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:41.381460 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts --> /etc/test/nested/copy/1383833/hosts (40 bytes)
	I0916 10:48:41.406699 1405153 start.go:296] duration metric: took 193.131275ms for postStartSetup
	I0916 10:48:41.406787 1405153 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:41.406826 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.424693 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.518715 1405153 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:41.524705 1405153 fix.go:56] duration metric: took 7.235611151s for fixHost
	I0916 10:48:41.524721 1405153 start.go:83] releasing machines lock for "functional-919910", held for 7.235654293s
	I0916 10:48:41.524821 1405153 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-919910
	I0916 10:48:41.541935 1405153 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:41.541984 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.542020 1405153 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:41.542086 1405153 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
	I0916 10:48:41.561069 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.570434 1405153 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
	I0916 10:48:41.656471 1405153 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:41.785895 1405153 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:48:41.929425 1405153 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:41.933963 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.943404 1405153 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:41.943491 1405153 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:41.953183 1405153 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:41.953198 1405153 start.go:495] detecting cgroup driver to use...
	I0916 10:48:41.953235 1405153 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:41.953293 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:48:41.966190 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:48:41.978010 1405153 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:41.978067 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:41.992752 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:42.006013 1405153 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:42.151230 1405153 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:42.299523 1405153 docker.go:233] disabling docker service ...
	I0916 10:48:42.299589 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:42.315054 1405153 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:42.329122 1405153 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:42.464786 1405153 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:42.586432 1405153 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:42.599418 1405153 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:42.615905 1405153 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:48:42.615978 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.625827 1405153 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:48:42.625890 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.636100 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.646444 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.656847 1405153 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:42.666724 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.676826 1405153 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:48:42.687635 1405153 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
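
Taken together, the sed one-liners above are idempotent edits to CRI-O's drop-in config. Reconstructed from the commands themselves (an approximation, not a dump of the actual file), the resulting /etc/crio/crio.conf.d/02-crio.conf fragment looks like:

    pause_image = "registry.k8s.io/pause:3.10"
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
      "net.ipv4.ip_unprivileged_port_start=0",
    ]

The daemon-reload and crio restart a few lines below are what make these edits take effect.
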
	I0916 10:48:42.698381 1405153 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:42.707000 1405153 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:42.715679 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:42.847692 1405153 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:48:43.038937 1405153 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:48:43.039005 1405153 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:48:43.042800 1405153 start.go:563] Will wait 60s for crictl version
	I0916 10:48:43.042867 1405153 ssh_runner.go:195] Run: which crictl
	I0916 10:48:43.046213 1405153 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:43.092243 1405153 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:48:43.092320 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.132595 1405153 ssh_runner.go:195] Run: crio --version
	I0916 10:48:43.181201 1405153 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:48:43.184095 1405153 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
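
The long --format argument in the docker network inspect call above is a Go text/template that the docker CLI renders against the inspected object; this is how minikube extracts the subnet, gateway, and MTU as ad-hoc JSON. A minimal illustration of the mechanism (hypothetical struct and values, not docker's real types):

    package main

    import (
    	"os"
    	"text/template"
    )

    // Stand-in for the fields the template references via {{.Name}} etc.
    type network struct {
    	Name   string
    	Driver string
    }

    func main() {
    	// The CLI parses the --format string with text/template and executes
    	// it against the inspect result; fields expand into the JSON skeleton.
    	tmpl := template.Must(template.New("net").Parse(
    		`{"Name": "{{.Name}}", "Driver": "{{.Driver}}"}` + "\n"))
    	if err := tmpl.Execute(os.Stdout, network{Name: "functional-919910", Driver: "bridge"}); err != nil {
    		panic(err)
    	}
    }
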
	I0916 10:48:43.199863 1405153 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:43.206470 1405153 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:43.209330 1405153 kubeadm.go:883] updating cluster {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:43.209456 1405153 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:48:43.209542 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.254938 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.254950 1405153 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:48:43.255048 1405153 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:43.291546 1405153 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:48:43.291559 1405153 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:43.291565 1405153 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 crio true true} ...
	I0916 10:48:43.291676 1405153 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=functional-919910 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:43.291761 1405153 ssh_runner.go:195] Run: crio config
	I0916 10:48:43.340850 1405153 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:43.340962 1405153 cni.go:84] Creating CNI manager for ""
	I0916 10:48:43.340975 1405153 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:48:43.340984 1405153 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:43.341006 1405153 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-919910 NodeName:functional-919910 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:43.341148 1405153 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "functional-919910"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
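
The multi-document YAML above is the kubeadm/kubelet/kube-proxy configuration that minikube ships to the node (scp'd to /var/tmp/minikube/kubeadm.yaml.new a few lines below). Two of its fields, cgroupDriver and containerRuntimeEndpoint, must agree with the CRI-O settings applied earlier; a quick programmatic spot-check might look like this sketch (assumes the sigs.k8s.io/yaml module is available; not part of the test suite):

    package main

    import (
    	"fmt"

    	"sigs.k8s.io/yaml"
    )

    func main() {
    	// A trimmed copy of the KubeletConfiguration document from the log.
    	cfg := "apiVersion: kubelet.config.k8s.io/v1beta1\n" +
    		"kind: KubeletConfiguration\n" +
    		"cgroupDriver: cgroupfs\n" +
    		"containerRuntimeEndpoint: unix:///var/run/crio/crio.sock\n" +
    		"failSwapOn: false\n"

    	var doc map[string]interface{}
    	if err := yaml.Unmarshal([]byte(cfg), &doc); err != nil {
    		panic(err)
    	}
    	// If these drift from CRI-O's cgroup_manager and socket path, the
    	// kubelet and the runtime disagree about cgroup ownership and pods
    	// fail to start.
    	fmt.Println("cgroupDriver:", doc["cgroupDriver"])
    	fmt.Println("runtime endpoint:", doc["containerRuntimeEndpoint"])
    }
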
	
	I0916 10:48:43.341215 1405153 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:43.350412 1405153 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:43.350476 1405153 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:43.361154 1405153 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (367 bytes)
	I0916 10:48:43.380083 1405153 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:43.398316 1405153 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2005 bytes)
	I0916 10:48:43.417671 1405153 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:43.421406 1405153 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:43.572628 1405153 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:43.587702 1405153 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910 for IP: 192.168.49.2
	I0916 10:48:43.587714 1405153 certs.go:194] generating shared ca certs ...
	I0916 10:48:43.587731 1405153 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:43.587872 1405153 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:48:43.587922 1405153 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:48:43.587928 1405153 certs.go:256] generating profile certs ...
	I0916 10:48:43.588013 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.key
	I0916 10:48:43.588061 1405153 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key.debd5ef9
	I0916 10:48:43.588099 1405153 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key
	I0916 10:48:43.588211 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:48:43.588269 1405153 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:43.588278 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:43.588301 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:48:43.588323 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:43.588343 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:48:43.588383 1405153 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:48:43.589063 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:43.615227 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:48:43.640209 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:43.665409 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:48:43.690396 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:43.715720 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:43.741758 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:43.766512 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:43.790812 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:43.815736 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:48:43.840930 1405153 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:48:43.866622 1405153 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:43.885132 1405153 ssh_runner.go:195] Run: openssl version
	I0916 10:48:43.890673 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:43.900521 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904289 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.904360 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:43.912159 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:43.921601 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:48:43.931422 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935145 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.935204 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:48:43.942725 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:43.952136 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:48:43.962336 1405153 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966066 1405153 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.966132 1405153 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:48:43.973393 1405153 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:43.983388 1405153 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:43.987432 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:43.994465 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:44.005212 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:44.014973 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:44.023352 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:44.030882 1405153 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
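	The openssl sequence above is the CA-trust setup: each PEM copied into /usr/share/ca-certificates is hashed with "openssl x509 -hash" and symlinked into /etc/ssl/certs as <hash>.0 (the b5213941.0 link above is exactly that), which is how OpenSSL-based clients look up trusted certs, and the "-checkend 86400" runs then assert each cluster cert stays valid for at least 24 hours. A minimal by-hand sketch of the same steps (the path is illustrative, not one created by this run):

	  pem=/usr/share/ca-certificates/minikubeCA.pem
	  hash=$(openssl x509 -hash -noout -in "$pem")     # prints the subject hash, e.g. b5213941
	  sudo ln -fs "$pem" "/etc/ssl/certs/${hash}.0"    # trust-store lookups are by hash
	  openssl x509 -noout -in "$pem" -checkend 86400   # exits non-zero if expiring within 24h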
	I0916 10:48:44.038292 1405153 kubeadm.go:392] StartCluster: {Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:44.038381 1405153 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:44.038447 1405153 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:44.078250 1405153 cri.go:89] found id: "67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be"
	I0916 10:48:44.078263 1405153 cri.go:89] found id: "e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18"
	I0916 10:48:44.078267 1405153 cri.go:89] found id: "68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7"
	I0916 10:48:44.078269 1405153 cri.go:89] found id: "2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7"
	I0916 10:48:44.078272 1405153 cri.go:89] found id: "84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28"
	I0916 10:48:44.078275 1405153 cri.go:89] found id: "8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715"
	I0916 10:48:44.078278 1405153 cri.go:89] found id: "5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb"
	I0916 10:48:44.078281 1405153 cri.go:89] found id: "9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98"
	I0916 10:48:44.078283 1405153 cri.go:89] found id: "89084e33c979a76a3a4bbd24eab8c848deb25d8bd474bad381f47a24e0373c2e"
	I0916 10:48:44.078289 1405153 cri.go:89] found id: "584cffa44f32723af45447c07bf6e3fc641b7c61fe43302aad35c776bd065faf"
	I0916 10:48:44.078292 1405153 cri.go:89] found id: "9fdab793eb970a5f01845e2aeaf1389846fd7113bbdedbb122c9c796017271d5"
	I0916 10:48:44.078294 1405153 cri.go:89] found id: "3e31d247381fd150f97fed045c0d264e01a0046902133f839fc323ed9d5fa7b9"
	I0916 10:48:44.078297 1405153 cri.go:89] found id: "6d211253a1170338e5b23dda8b3c6a26dde0aa55d2f91ee289142b0410943b49"
	I0916 10:48:44.078299 1405153 cri.go:89] found id: "19cb8b26283b5427eeb4adf80032848225300f8293659c95a04c937ca3877ced"
	I0916 10:48:44.078303 1405153 cri.go:89] found id: "b88a79882d73e8e5ca5f134464b8f60ebbeb4a0aa75d6f83d1ec9e3d9f6bd093"
	I0916 10:48:44.078305 1405153 cri.go:89] found id: "790d8c6b7f5cff6aa8da32ec82eeab04f109110f2b3a39803bda7a570da2cf75"
	I0916 10:48:44.078307 1405153 cri.go:89] found id: ""
	I0916 10:48:44.078373 1405153 ssh_runner.go:195] Run: sudo runc list -f json
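	The container IDs listed above come from the crictl query on the preceding Run: line. The same query can be reproduced by hand on the node, plus a human-readable variant (sketch; assumes shell access to the minikube node):

	  sudo crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system   # IDs only, as logged
	  sudo crictl ps -a --label io.kubernetes.pod.namespace=kube-system -o table  # adds state/name/pod columns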
	
	
	==> CRI-O <==
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546185920Z" level=info msg="Created container 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb: kube-system/coredns-7c65d6cfc9-qzn8c/coredns" id=1d08d23d-6b30-4ef9-9749-75656f2f22fb name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.546884840Z" level=info msg="Starting container: 4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb" id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.550182971Z" level=info msg="Started container" PID=5232 containerID=072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1 description=kube-system/kindnet-nb5xl/kindnet-cni id=2d47cfa2-1c51-4e3e-84c6-20565a25b42f name=/runtime.v1.RuntimeService/StartContainer sandboxID=306886331d6eea412e2593dd8cefd104ae0353cb2453c12f41db88e1881fec0f
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.573383165Z" level=info msg="Started container" PID=5218 containerID=4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb description=kube-system/coredns-7c65d6cfc9-qzn8c/coredns id=9fbfa96f-4211-4584-beb6-a4c1d812c1fa name=/runtime.v1.RuntimeService/StartContainer sandboxID=4bae1031966b207c601881c3be1d2b66aa5218cb02e6eb2af68deea5be18503b
	Sep 16 10:48:52 functional-919910 crio[4619]: time="2024-09-16 10:48:52.605671989Z" level=info msg="Started container" PID=5240 containerID=6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db description=kube-system/kube-proxy-nvpzv/kube-proxy id=0568d2b4-f47d-4f64-b678-a50f76abf9da name=/runtime.v1.RuntimeService/StartContainer sandboxID=46672cf6a1a3cfbb490f865d512383492c0c4c4061599f90461031829a93bd49
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.088292803Z" level=info msg="Stopping pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.089356144Z" level=info msg="Stopped pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=9475e9d1-d7b3-4579-ad77-5746df207096 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.162210356Z" level=info msg="Removing container: 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:48:53 functional-919910 crio[4619]: time="2024-09-16 10:48:53.186675643Z" level=info msg="Removed container 84ca31fb2ed034d56721c7ab90b9c5e414e315335f55f7d30435fc91501dad28: kube-system/kube-apiserver-functional-919910/kube-apiserver" id=7ed385df-0fad-4b1c-bc96-41da5fba68ba name=/runtime.v1.RuntimeService/RemoveContainer
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.038404155Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042553703Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042594104Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.042618596Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046006841Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046045847Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.046064399Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049550759Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049587057Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.049603935Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053090451Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:49:03 functional-919910 crio[4619]: time="2024-09-16 10:49:03.053131229Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.256320060Z" level=info msg="Stopping pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=383cd39c-1f42-454d-bd04-0ba447bad713 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.256366927Z" level=info msg="Stopped pod sandbox (already stopped): 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=383cd39c-1f42-454d-bd04-0ba447bad713 name=/runtime.v1.RuntimeService/StopPodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.257128860Z" level=info msg="Removing pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
	Sep 16 10:49:47 functional-919910 crio[4619]: time="2024-09-16 10:49:47.264653739Z" level=info msg="Removed pod sandbox: 8fd62fbc34bf1ffb9092b83c48e89b00e7cdd219dbb5b91410c53ba0718a28f1" id=55112393-8c8d-4009-b80a-cc4ad96b5c22 name=/runtime.v1.RuntimeService/RemovePodSandbox
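	The CRI-O excerpt above is a normal restart lifecycle: coredns, kindnet and kube-proxy containers created and started, then StopPodSandbox and, later, RemovePodSandbox for one old sandbox (8fd62fbc...). To follow the same stream live on the node (sketch, assuming shell access):

	  sudo journalctl -u crio -f   # tail the CRI-O service log shown above
	  sudo crictl pods             # list current pod sandboxes and their states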
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	6f8c0a2f9d3e9       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   2 minutes ago       Running             kube-proxy                2                   46672cf6a1a3c       kube-proxy-nvpzv
	072cecfbf1d39       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   2 minutes ago       Running             kindnet-cni               2                   306886331d6ee       kindnet-nb5xl
	4deb5cc6dce54       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   2 minutes ago       Running             coredns                   2                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	0318f459801da       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   2 minutes ago       Running             storage-provisioner       2                   e27809ba10603       storage-provisioner
	12f0a29c7ca2a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   2 minutes ago       Running             kube-apiserver            0                   00a81472718e2       kube-apiserver-functional-919910
	7fcb94c0bce84       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   2 minutes ago       Running             kube-scheduler            2                   00455a328acb5       kube-scheduler-functional-919910
	d69895ce029ae       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   2 minutes ago       Running             kube-controller-manager   2                   0ffab32638624       kube-controller-manager-functional-919910
	0fb814efa9ee9       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   2 minutes ago       Running             etcd                      2                   46079181d2925       etcd-functional-919910
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   2 minutes ago       Exited              storage-provisioner       1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   2 minutes ago       Exited              kindnet-cni               1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   2 minutes ago       Exited              kube-proxy                1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   2 minutes ago       Exited              coredns                   1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   2 minutes ago       Exited              kube-controller-manager   1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   2 minutes ago       Exited              etcd                      1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   2 minutes ago       Exited              kube-scheduler            1                   00455a328acb5       kube-scheduler-functional-919910
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
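	This CoreDNS instance came up while the apiserver was still restarting: the connection-refused errors and the "waiting for Kubernetes API" loop are the kubernetes and ready plugins retrying, and the closing "lameduck mode for 5s" matches the stock kubeadm health stanza (health { lameduck 5s }). Once the apiserver is back, the active Corefile can be confirmed with (sketch, assuming a working kubectl):

	  kubectl -n kube-system get configmap coredns -o jsonpath='{.data.Corefile}'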
	
	
	==> coredns [4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:38507 - 51569 "HINFO IN 5479759435856645223.8976423270861566953. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013474675s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:50:54 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m52s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m58s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m52s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m6s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m57s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m52s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m57s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m51s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m50s                  kube-proxy       
	  Normal   Starting                 2m5s                   kube-proxy       
	  Normal   Starting                 2m52s                  kube-proxy       
	  Warning  CgroupV1                 3m57s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m57s                  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m57s                  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m57s                  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m57s                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m53s                  node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                3m11s                  kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           2m50s                  node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeHasSufficientMemory  2m11s (x8 over 2m11s)  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 2m11s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 2m11s                  kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m11s (x8 over 2m11s)  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m11s (x7 over 2m11s)  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m4s                   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [0fb814efa9ee90e98aaa699004b013bf5a6a31aa8325e33f52783fa123bcc384] <==
	{"level":"info","ts":"2024-09-16T10:48:47.877008Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:48:47.877076Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:47.877156Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.877188Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.899347Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:47.899570Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:47.899599Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:47.899714Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:47.899728Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:49.036711Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036850Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036905Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.036978Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037063Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.040899Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:49.041104Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.042103Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.043175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.043511Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.045434Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.046389Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.045527Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:49.050654Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:35.943802Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:35.943843Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:35.943911Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.943938Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945041Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945137Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:35.990678Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:35.995430Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995621Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995642Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:50:58 up 10:33,  0 users,  load average: 0.98, 1.24, 1.59
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1] <==
	I0916 10:48:53.222563       1 controller.go:374] Syncing nftables rules
	I0916 10:49:03.037971       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:03.038169       1 main.go:299] handling current node
	I0916 10:49:13.040898       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:13.041015       1 main.go:299] handling current node
	I0916 10:49:23.042091       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:23.042126       1 main.go:299] handling current node
	I0916 10:49:33.044840       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:33.044878       1 main.go:299] handling current node
	I0916 10:49:43.040749       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:43.040855       1 main.go:299] handling current node
	I0916 10:49:53.038520       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:53.038552       1 main.go:299] handling current node
	I0916 10:50:03.038499       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:03.038621       1 main.go:299] handling current node
	I0916 10:50:13.047462       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:13.047501       1 main.go:299] handling current node
	I0916 10:50:23.044819       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:23.044854       1 main.go:299] handling current node
	I0916 10:50:33.037721       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:33.037760       1 main.go:299] handling current node
	I0916 10:50:43.043131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:43.043172       1 main.go:299] handling current node
	I0916 10:50:53.038035       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:53.038069       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	I0916 10:48:31.492826       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:31.492896       1 main.go:299] handling current node
	
	
	==> kube-apiserver [12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5] <==
	I0916 10:48:51.507247       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:51.509566       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:51.514923       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:51.514963       1 policy_source.go:224] refreshing policies
	I0916 10:48:51.517563       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:48:51.517673       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:51.517813       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:51.520522       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:51.532063       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:51.532210       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:51.532286       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:51.532417       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:51.534946       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:51.535625       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:51.536172       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:51.536265       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:51.536300       1 cache.go:39] Caches are synced for autoregister controller
	E0916 10:48:51.552940       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:52.288516       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:53.633170       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:53.763564       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:53.775869       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:53.843592       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:53.851287       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:49:10.096044       1 controller.go:615] quota admission added evaluator for: endpoints
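	The lone apiserver error above ("no API server IP addresses were listed in storage") is typically a benign restart race: the endpoint reconciler tries to prune stale kubernetes-service endpoints before it has published its own address. Once the server settles, the endpoint object should look normal (sketch, assuming a working kubectl):

	  kubectl get endpoints kubernetes   # expected here: 192.168.49.2:8441, per the StartCluster config above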
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:33.396647       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [d69895ce029aea3aacc9c117ed64c274077ed21cefa739082ee00be46e903809] <==
	I0916 10:48:54.785999       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:54.794997       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:54.801438       1 shared_informer.go:320] Caches are synced for service account
	I0916 10:48:54.804758       1 shared_informer.go:320] Caches are synced for job
	I0916 10:48:54.804818       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:54.804841       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:54.804854       1 shared_informer.go:320] Caches are synced for certificate-csrapproving
	I0916 10:48:54.804908       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:54.804944       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:54.804977       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:54.805173       1 shared_informer.go:320] Caches are synced for ClusterRoleAggregator
	I0916 10:48:54.807707       1 shared_informer.go:320] Caches are synced for disruption
	I0916 10:48:54.808757       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:54.812488       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:54.838364       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:54.838534       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="97.917µs"
	I0916 10:48:54.855717       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 10:48:54.857096       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:54.955113       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:55.012167       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.012318       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 10:48:55.042247       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:55.444362       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465215       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db] <==
	I0916 10:48:52.781215       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:52.872969       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:52.873137       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:52.892040       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:52.892101       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:52.893967       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:52.894261       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:52.894296       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:52.896485       1 config.go:199] "Starting service config controller"
	I0916 10:48:52.896530       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:52.898078       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:52.898096       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:52.899995       1 config.go:328] "Starting node config controller"
	I0916 10:48:52.900022       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:52.998445       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:52.998473       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:53.000890       1 shared_informer.go:320] Caches are synced for node config
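	Both kube-proxy generations start identically in iptables mode; the "configuration may be incomplete or incorrect" line is only a warning that nodePortAddresses is unset, so NodePort services accept connections on every local IP. The active proxier mode can be double-checked from the pod logs (sketch; pod name from this report):

	  kubectl -n kube-system logs kube-proxy-nvpzv | grep -i proxier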
	
	
	==> kube-scheduler [7fcb94c0bce841ce6b01965b0d7eaeedcf47449b34b9a524c16d4f0580db9e76] <==
	I0916 10:48:50.310915       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:51.321329       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:51.321447       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:51.321484       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:51.321527       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:51.482161       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:51.488747       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:51.491214       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:51.491627       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:51.497428       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:51.491653       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:51.597727       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:35.945051       1 tlsconfig.go:258] "Shutting down DynamicServingCertificateController"
	I0916 10:48:35.945270       1 secure_serving.go:258] Stopped listening on 127.0.0.1:10259
	E0916 10:48:35.945400       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:57 functional-919910 kubelet[4906]: E0916 10:48:57.149525    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483737149192699,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150843    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:07 functional-919910 kubelet[4906]: E0916 10:49:07.150885    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483747150627361,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:17 functional-919910 kubelet[4906]: E0916 10:49:17.152819    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483757152278637,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:17 functional-919910 kubelet[4906]: E0916 10:49:17.152854    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483757152278637,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:27 functional-919910 kubelet[4906]: E0916 10:49:27.154325    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483767154114722,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:27 functional-919910 kubelet[4906]: E0916 10:49:27.154362    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483767154114722,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:37 functional-919910 kubelet[4906]: E0916 10:49:37.155548    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483777155245399,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:37 functional-919910 kubelet[4906]: E0916 10:49:37.155585    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483777155245399,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:47 functional-919910 kubelet[4906]: E0916 10:49:47.157036    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483787156852576,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:47 functional-919910 kubelet[4906]: E0916 10:49:47.157093    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483787156852576,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158435    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158481    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159871    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159920    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161814    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161855    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163614    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163651    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164729    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164762    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166381    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166973    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168441    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168483    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
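
The eviction-manager pair above repeats every ten seconds. The error text embeds the CRI ImageFsInfo response, and it shows CRI-O returning usage for its image store (/var/lib/containers/storage/overlay-images) while ContainerFilesystems stays empty, which this kubelet appears to treat as "missing image stats". A minimal sketch of issuing the same RPC directly, assuming the k8s.io/cri-api and google.golang.org/grpc modules and CRI-O's default socket path:

	// Sketch: query the CRI ImageFsInfo endpoint the way kubelet's stats
	// provider does. The socket path is CRI-O's default and may differ.
	package main

	import (
		"context"
		"fmt"
		"log"
		"time"

		"google.golang.org/grpc"
		"google.golang.org/grpc/credentials/insecure"
		runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
	)

	func main() {
		conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
			grpc.WithTransportCredentials(insecure.NewCredentials()))
		if err != nil {
			log.Fatal(err)
		}
		defer conn.Close()

		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()

		resp, err := runtimeapi.NewImageServiceClient(conn).
			ImageFsInfo(ctx, &runtimeapi.ImageFsInfoRequest{})
		if err != nil {
			log.Fatal(err)
		}
		// The log above corresponds to a non-empty ImageFilesystems slice
		// paired with an empty ContainerFilesystems slice.
		fmt.Printf("image filesystems: %d, container filesystems: %d\n",
			len(resp.ImageFilesystems), len(resp.ContainerFilesystems))
	}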
	
	
	==> storage-provisioner [0318f459801da15bd2e19f5a98b73c1156fff994dcdda61e57a57ddf9e92ccee] <==
	I0916 10:48:52.562259       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:52.664564       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:52.664725       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:10.099884       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:10.100345       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	I0916 10:49:10.101540       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"609", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9 became leader
	I0916 10:49:10.201234       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
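
Both storage-provisioner containers above contend for the same kube-system/k8s.io-minikube-hostpath lock, so each logs "attempting to acquire" and starts its controller only after "successfully acquired"; the Event lines show this provisioner anchors the election on an Endpoints object. A minimal sketch of the same client-go leader-election flow, using the newer Lease lock as a stand-in (lock name and namespace are taken from the log, the rest is assumed):

	// Sketch of the leaderelection.go acquire/run flow seen above. The
	// Lease-based lock stands in for the Endpoints lock this provisioner uses.
	package main

	import (
		"context"
		"log"
		"os"
		"time"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/rest"
		"k8s.io/client-go/tools/leaderelection"
		"k8s.io/client-go/tools/leaderelection/resourcelock"
	)

	func main() {
		cfg, err := rest.InClusterConfig()
		if err != nil {
			log.Fatal(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)
		hostname, _ := os.Hostname()

		lock := &resourcelock.LeaseLock{
			LeaseMeta: metav1.ObjectMeta{
				Name:      "k8s.io-minikube-hostpath",
				Namespace: "kube-system",
			},
			Client:     client.CoordinationV1(),
			LockConfig: resourcelock.ResourceLockConfig{Identity: hostname},
		}

		leaderelection.RunOrDie(context.Background(), leaderelection.LeaderElectionConfig{
			Lock:          lock,
			LeaseDuration: 15 * time.Second,
			RenewDeadline: 10 * time.Second,
			RetryPeriod:   2 * time.Second,
			Callbacks: leaderelection.LeaderCallbacks{
				OnStartedLeading: func(ctx context.Context) {
					log.Println("acquired lock; starting provisioner controller")
				},
				OnStoppedLeading: func() {
					log.Println("lost lock; stopping")
				},
			},
		})
	}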
	

-- /stdout --
** stderr ** 
	E0916 10:50:57.241290 1409803 logs.go:258] failed to output last start logs: failed to read file /home/jenkins/minikube-integration/19651-1378450/.minikube/logs/lastStart.txt: bufio.Scanner: token too long

** /stderr **
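
The stderr line above is a Go standard-library limit rather than a cluster fault: bufio.Scanner aborts with "token too long" once a single line exceeds its buffer, which defaults to bufio.MaxScanTokenSize (64 KiB), and lastStart.txt evidently contains a longer line. A sketch of a line reader that raises the cap, assuming the same file path:

	// Sketch: scan a log file line-by-line while tolerating very long lines.
	// Scanner.Buffer raises the per-token limit past the 64 KiB default.
	package main

	import (
		"bufio"
		"fmt"
		"log"
		"os"
	)

	func main() {
		f, err := os.Open("/home/jenkins/minikube-integration/19651-1378450/.minikube/logs/lastStart.txt")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		scanner := bufio.NewScanner(f)
		scanner.Buffer(make([]byte, 0, 1<<20), 16<<20) // allow lines up to 16 MiB
		for scanner.Scan() {
			fmt.Println(scanner.Text())
		}
		if err := scanner.Err(); err != nil {
			log.Fatal(err) // with the default buffer this is where "token too long" surfaces
		}
	}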
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (1.721006ms)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/PersistentVolumeClaim (101.90s)

TestFunctional/parallel/NodeLabels (4.24s)

=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels

=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:219: (dbg) Run:  kubectl --context functional-919910 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
functional_test.go:219: (dbg) Non-zero exit: kubectl --context functional-919910 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": fork/exec /usr/local/bin/kubectl: exec format error (727.751µs)
functional_test.go:221: failed to 'kubectl get nodes' with args "kubectl --context functional-919910 get nodes --output=go-template \"--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'\"": fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:227: expected to have label "minikube.k8s.io/commit" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/version" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/updated_at" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/name" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/primary" in node labels but got : 
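
Every kubectl invocation in this run dies with "fork/exec /usr/local/bin/kubectl: exec format error", which on a Linux arm64 runner almost always means the installed binary targets another architecture (for example an amd64 kubectl on this aarch64 host). A minimal diagnostic sketch, assuming a Linux/ELF host, that compares the binary's ELF machine type with the host's:

	// Sketch: explain an "exec format error" by reading the binary's ELF
	// header and comparing it with the architecture this program runs on.
	package main

	import (
		"debug/elf"
		"fmt"
		"log"
		"os"
		"runtime"
	)

	func main() {
		path := "/usr/local/bin/kubectl"
		if len(os.Args) > 1 {
			path = os.Args[1]
		}
		f, err := elf.Open(path)
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		// On this runner we would expect EM_AARCH64; EM_X86_64 here would
		// reproduce exactly the exec format error in the test output.
		fmt.Printf("%s: %s, host: %s\n", path, f.Machine, runtime.GOARCH)
	}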
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/NodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-919910
helpers_test.go:235: (dbg) docker inspect functional-919910:

-- stdout --
	[
	    {
	        "Id": "40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd",
	        "Created": "2024-09-16T10:46:39.195115177Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1399656,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:46:39.363423533Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hostname",
	        "HostsPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/hosts",
	        "LogPath": "/var/lib/docker/containers/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd/40a7320e94dbd1ca8f99c16961d5283390467882986d80f040baa102ab2046bd-json.log",
	        "Name": "/functional-919910",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-919910:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-919910",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/merged",
	                "UpperDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/diff",
	                "WorkDir": "/var/lib/docker/overlay2/14032252dd4d379a5dd6bfc812b8514e72a450050f00baaedcadb811ce19b2ca/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-919910",
	                "Source": "/var/lib/docker/volumes/functional-919910/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-919910",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-919910",
	                "name.minikube.sigs.k8s.io": "functional-919910",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "09e546724865183e02638a32689645e28fd2b24039febe37938c93bd516fa319",
	            "SandboxKey": "/var/run/docker/netns/09e546724865",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34613"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34614"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34617"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34615"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34616"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-919910": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "6e0fb93702822d0f6745b0df63c8098af583107dce24967dde54449c81a6a7de",
	                    "EndpointID": "0e4e29393de23184514ee78cc12ea7445e6307e65c69c812751182560a7c0121",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-919910",
	                        "40a7320e94db"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
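
The inspect output above shows the kicbase container publishing ports 22, 2376, 5000, 8441 and 32443 on 127.0.0.1 with dynamically assigned host ports (34613 through 34617); the status check that follows reaches the apiserver through the 8441/tcp mapping. A sketch of reading such a mapping programmatically, assuming the Docker Engine Go SDK:

	// Sketch: resolve the host port Docker assigned to the container's
	// 8441/tcp (minikube's apiserver port in this profile).
	package main

	import (
		"context"
		"fmt"
		"log"

		"github.com/docker/docker/client"
	)

	func main() {
		cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
		if err != nil {
			log.Fatal(err)
		}
		defer cli.Close()

		info, err := cli.ContainerInspect(context.Background(), "functional-919910")
		if err != nil {
			log.Fatal(err)
		}
		for _, b := range info.NetworkSettings.Ports["8441/tcp"] {
			fmt.Printf("8441/tcp -> %s:%s\n", b.HostIP, b.HostPort) // e.g. 127.0.0.1:34616
		}
	}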
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-919910 -n functional-919910
helpers_test.go:244: <<< TestFunctional/parallel/NodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/NodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs -n 25: (3.03863413s)
helpers_test.go:252: TestFunctional/parallel/NodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command  |                                  Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| ssh       | functional-919910 ssh -- ls                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh cat                                               | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | /mount-9p/test-1726483858215687711                                      |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh mount |                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | grep 9p; ls -la /mount-9p; cat                                          |                   |         |         |                     |                     |
	|           | /mount-9p/pod-dates                                                     |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdspecific-port740621385/001:/mount-9p |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1 --port 46464                                     |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| start     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --dry-run --alsologtostderr                                             |                   |         |         |                     |                     |
	|           | -v=1 --driver=docker                                                    |                   |         |         |                     |                     |
	|           | --container-runtime=crio                                                |                   |         |         |                     |                     |
	| dashboard | --url --port 36195                                                      | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -p functional-919910                                                    |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh -- ls                                             | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount2  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount1  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount3  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount2                                                              |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh findmnt                                           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount3                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-919910                                                    | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --kill=true                                                             |                   |         |         |                     |                     |
	| license   |                                                                         | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | systemctl is-active docker                                              |                   |         |         |                     |                     |
	| ssh       | functional-919910 ssh sudo                                              | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | systemctl is-active containerd                                          |                   |         |         |                     |                     |
	| image     | functional-919910 image load --daemon                                   | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | kicbase/echo-server:functional-919910                                   |                   |         |         |                     |                     |
	|           | --alsologtostderr                                                       |                   |         |         |                     |                     |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:02
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:02.087879 1411119 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:02.088043 1411119 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:02.088070 1411119 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:02.088090 1411119 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:02.088355 1411119 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:02.088819 1411119 out.go:352] Setting JSON to false
	I0916 10:51:02.089772 1411119 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38007,"bootTime":1726445855,"procs":177,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:02.089847 1411119 start.go:139] virtualization:  
	I0916 10:51:02.092919 1411119 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:02.096358 1411119 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:02.096483 1411119 notify.go:220] Checking for updates...
	I0916 10:51:02.101643 1411119 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:02.104448 1411119 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:02.108493 1411119 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:02.113551 1411119 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:02.116163 1411119 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:02.119313 1411119 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:02.120041 1411119 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:02.148042 1411119 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:02.148169 1411119 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:02.215881 1411119 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:02.206322665 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:02.215998 1411119 docker.go:318] overlay module found
	I0916 10:51:02.218840 1411119 out.go:177] * Using the docker driver based on existing profile
	I0916 10:51:02.221572 1411119 start.go:297] selected driver: docker
	I0916 10:51:02.221593 1411119 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:02.221708 1411119 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:02.221816 1411119 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:02.274109 1411119 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:02.264208469 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:02.274582 1411119 cni.go:84] Creating CNI manager for ""
	I0916 10:51:02.274637 1411119 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:51:02.274690 1411119 start.go:340] cluster config:
	{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:02.279287 1411119 out.go:177] * dry-run validation complete!
	
	
	==> CRI-O <==
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.650958345Z" level=info msg="Checking pod kubernetes-dashboard_kubernetes-dashboard-695b96c756-tfx57 for CNI network kindnet (type=ptp)"
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.657861226Z" level=info msg="Ran pod sandbox 922d838b02c1c0aa85d9e15c00fc4e58fcd00921ff5664f69405eaa786ef16e7 with infra container: kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57/POD" id=aa9175e7-f6e7-4e1c-8b89-d3d323f98b0f name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.659582856Z" level=info msg="Checking image status: docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93" id=5732b229-26fe-4426-8a68-ccd07bb0ed20 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.659843017Z" level=info msg="Image docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93 not found" id=5732b229-26fe-4426-8a68-ccd07bb0ed20 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:04 functional-919910 crio[4619]: time="2024-09-16 10:51:04.920886924Z" level=info msg="Trying to access \"docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c\""
	Sep 16 10:51:05 functional-919910 crio[4619]: time="2024-09-16 10:51:05.274524642Z" level=info msg="Image operating system mismatch: image uses OS \"linux\"+architecture \"amd64\", expecting one of \"linux+arm64\""
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.809554779Z" level=info msg="Pulled image: docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c" id=b8983bb3-9f46-487e-b956-48074a39c1da name=/runtime.v1.ImageService/PullImage
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.810726343Z" level=info msg="Checking image status: docker.io/kubernetesui/metrics-scraper:v1.0.8@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c" id=1fb4479f-9d9e-405f-b962-76f86661fc2e name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.811842327Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a,RepoTags:[],RepoDigests:[docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c docker.io/kubernetesui/metrics-scraper@sha256:853c43f3cced687cb211708aa0024304a5adb33ec45ebf5915d318358822e09a],Size_:42263767,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=1fb4479f-9d9e-405f-b962-76f86661fc2e name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.813069569Z" level=info msg="Checking image status: docker.io/kubernetesui/metrics-scraper:v1.0.8@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c" id=7cffcf10-9298-4f94-8de8-2fe35f021c93 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.814078331Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a,RepoTags:[],RepoDigests:[docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c docker.io/kubernetesui/metrics-scraper@sha256:853c43f3cced687cb211708aa0024304a5adb33ec45ebf5915d318358822e09a],Size_:42263767,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=7cffcf10-9298-4f94-8de8-2fe35f021c93 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.814264280Z" level=info msg="Pulling image: docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93" id=6c62d380-cf14-49aa-b7a2-d24b2316a5fb name=/runtime.v1.ImageService/PullImage
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.820808802Z" level=info msg="Creating container: kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24/dashboard-metrics-scraper" id=3a422ca5-25a4-41ee-a396-c524965015eb name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.821229247Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.820892574Z" level=info msg="Trying to access \"docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93\""
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.850953742Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/4610a5ca443e51a0ae1f7e480a1a7fe2f24f0e79ec75468315387086c9869380/merged/etc/group: no such file or directory"
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.951621308Z" level=info msg="Created container f16eb7cacb8ca2166b0751a397c99c5d8ce000c98e19ba296399e6ff3bdf6cc7: kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24/dashboard-metrics-scraper" id=3a422ca5-25a4-41ee-a396-c524965015eb name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.952652445Z" level=info msg="Starting container: f16eb7cacb8ca2166b0751a397c99c5d8ce000c98e19ba296399e6ff3bdf6cc7" id=b8efc6ad-7379-407f-8297-70f302f74604 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:51:06 functional-919910 crio[4619]: time="2024-09-16 10:51:06.969471980Z" level=info msg="Started container" PID=7528 containerID=f16eb7cacb8ca2166b0751a397c99c5d8ce000c98e19ba296399e6ff3bdf6cc7 description=kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24/dashboard-metrics-scraper id=b8efc6ad-7379-407f-8297-70f302f74604 name=/runtime.v1.RuntimeService/StartContainer sandboxID=e2a41fd0177c9e78cbe96ed596219767465b54a5c5464fee624d12b77e49a2cf
	Sep 16 10:51:07 functional-919910 crio[4619]: time="2024-09-16 10:51:07.096319139Z" level=info msg="Trying to access \"docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93\""
	Sep 16 10:51:09 functional-919910 crio[4619]: time="2024-09-16 10:51:09.508304910Z" level=info msg="Checking image status: kicbase/echo-server:functional-919910" id=362c8234-3151-42c1-afc6-8d2e030061fa name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:09 functional-919910 crio[4619]: time="2024-09-16 10:51:09.557126149Z" level=info msg="Checking image status: docker.io/kicbase/echo-server:functional-919910" id=57d30fbc-2208-4b21-9717-0941c9de7d98 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:09 functional-919910 crio[4619]: time="2024-09-16 10:51:09.557364043Z" level=info msg="Image docker.io/kicbase/echo-server:functional-919910 not found" id=57d30fbc-2208-4b21-9717-0941c9de7d98 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:09 functional-919910 crio[4619]: time="2024-09-16 10:51:09.637699910Z" level=info msg="Checking image status: localhost/kicbase/echo-server:functional-919910" id=7ff9e6fe-4971-41ff-a6e0-49058aa63eff name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:51:09 functional-919910 crio[4619]: time="2024-09-16 10:51:09.637949503Z" level=info msg="Image localhost/kicbase/echo-server:functional-919910 not found" id=7ff9e6fe-4971-41ff-a6e0-49058aa63eff name=/runtime.v1.ImageService/ImageStatus
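
The "Image operating system mismatch" entry at 10:51:05 records CRI-O skipping a linux/amd64 candidate while resolving the metrics-scraper digest on this arm64 node; the subsequent "Pulled image" line shows resolution succeeded, consistent with the digest being a multi-arch manifest list. A hypothetical out-of-band spot-check (not part of the test run; assumes skopeo and jq are installed) that lists the platforms published under that digest:

	# If the digest is a manifest list, print the platform of each entry.
	skopeo inspect --raw docker://docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c | jq '.manifests[].platform'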
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                            CREATED             STATE               NAME                        ATTEMPT             POD ID              POD
	f16eb7cacb8ca       docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c   4 seconds ago       Running             dashboard-metrics-scraper   0                   e2a41fd0177c9       dashboard-metrics-scraper-c5db448b4-qqx24
	6f8c0a2f9d3e9       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                 2 minutes ago       Running             kube-proxy                  2                   46672cf6a1a3c       kube-proxy-nvpzv
	072cecfbf1d39       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                 2 minutes ago       Running             kindnet-cni                 2                   306886331d6ee       kindnet-nb5xl
	4deb5cc6dce54       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                 2 minutes ago       Running             coredns                     2                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	0318f459801da       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                 2 minutes ago       Running             storage-provisioner         2                   e27809ba10603       storage-provisioner
	12f0a29c7ca2a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                                 2 minutes ago       Running             kube-apiserver              0                   00a81472718e2       kube-apiserver-functional-919910
	7fcb94c0bce84       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                 2 minutes ago       Running             kube-scheduler              2                   00455a328acb5       kube-scheduler-functional-919910
	d69895ce029ae       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                 2 minutes ago       Running             kube-controller-manager     2                   0ffab32638624       kube-controller-manager-functional-919910
	0fb814efa9ee9       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                 2 minutes ago       Running             etcd                        2                   46079181d2925       etcd-functional-919910
	67f50b0e25dae       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                                 3 minutes ago       Exited              storage-provisioner         1                   e27809ba10603       storage-provisioner
	e8aeda4b55bc6       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                                 3 minutes ago       Exited              kindnet-cni                 1                   306886331d6ee       kindnet-nb5xl
	68f543d941434       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                                 3 minutes ago       Exited              kube-proxy                  1                   46672cf6a1a3c       kube-proxy-nvpzv
	2089d6c47dd67       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                                 3 minutes ago       Exited              coredns                     1                   4bae1031966b2       coredns-7c65d6cfc9-qzn8c
	8f5620673b4ff       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                                 3 minutes ago       Exited              kube-controller-manager     1                   0ffab32638624       kube-controller-manager-functional-919910
	5bcfe047e4005       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                                 3 minutes ago       Exited              etcd                        1                   46079181d2925       etcd-functional-919910
	9a35fb982442f       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                                 3 minutes ago       Exited              kube-scheduler              1                   00455a328acb5       kube-scheduler-functional-919910
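
The ATTEMPT column and the Exited rows above show each long-lived component restarted twice during this run (attempts 1 and 2 reuse the same pod IDs), while kube-apiserver was recreated fresh in a new sandbox (attempt 0, no exited counterpart). A minimal sketch for reproducing this view directly on the node, assuming the functional-919910 profile is still up:

	# crictl ps -a lists running and exited containers, matching the table above.
	minikube -p functional-919910 ssh -- sudo crictl ps -a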
	
	
	==> coredns [2089d6c47dd6764fb74a622eaf36e8dda3344083a925f73a4dfcf0ebb952dbf7] <==
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39206 - 14119 "HINFO IN 5939583222120401635.3946217130147098167. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.038029402s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
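
The connection-refused errors in this (now-exited) coredns instance target 10.96.0.1:443, the in-cluster apiserver Service, and are consistent with the control-plane restart window visible elsewhere in this report: the old etcd received its shutdown signal at 10:48:35 and the replacement apiserver's caches synced at 10:48:51, after which this pod was sent SIGTERM and superseded by the instance below. A hypothetical check that the replacement settled, using the standard kube-dns label:

	# Assumes the kubectl context name matches the minikube profile name.
	kubectl --context functional-919910 -n kube-system get pods -l k8s-app=kube-dns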
	
	
	==> coredns [4deb5cc6dce54b2b55f84fa620aac8876a4dfb8d163a4e60aa19ebd7ba71d7eb] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:38507 - 51569 "HINFO IN 5479759435856645223.8976423270861566953. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013474675s
	
	
	==> describe nodes <==
	Name:               functional-919910
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-919910
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-919910
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_02_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:46:58 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-919910
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:51:04 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:46:55 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:51 +0000   Mon, 16 Sep 2024 10:47:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-919910
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 f14572b8323a44cca0faa88c76f2d4a6
	  System UUID:                d25b0873-ca83-44d4-9ed0-22dc44c6a8ae
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-qzn8c                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     4m5s
	  kube-system                 etcd-functional-919910                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m11s
	  kube-system                 kindnet-nb5xl                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m5s
	  kube-system                 kube-apiserver-functional-919910             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m19s
	  kube-system                 kube-controller-manager-functional-919910    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m10s
	  kube-system                 kube-proxy-nvpzv                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m5s
	  kube-system                 kube-scheduler-functional-919910             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m10s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m4s
	  kubernetes-dashboard        dashboard-metrics-scraper-c5db448b4-qqx24    0 (0%)        0 (0%)      0 (0%)           0 (0%)         7s
	  kubernetes-dashboard        kubernetes-dashboard-695b96c756-tfx57        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 4m3s                   kube-proxy       
	  Normal   Starting                 2m18s                  kube-proxy       
	  Normal   Starting                 3m5s                   kube-proxy       
	  Warning  CgroupV1                 4m10s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m10s                  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m10s                  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m10s                  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   Starting                 4m10s                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           4m6s                   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeReady                3m24s                  kubelet          Node functional-919910 status is now: NodeReady
	  Normal   RegisteredNode           3m3s                   node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
	  Normal   NodeHasSufficientMemory  2m24s (x8 over 2m24s)  kubelet          Node functional-919910 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 2m24s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 2m24s                  kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m24s (x8 over 2m24s)  kubelet          Node functional-919910 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m24s (x7 over 2m24s)  kubelet          Node functional-919910 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m17s                  node-controller  Node functional-919910 event: Registered Node functional-919910 in Controller
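
The duplicated Starting/NodeHas* events above reflect two kubelet restarts (4m10s and 2m24s before capture), each followed by its own RegisteredNode event. This section is standard kubectl output and can be regenerated against the same profile with:

	kubectl --context functional-919910 describe node functional-919910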
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [0fb814efa9ee90e98aaa699004b013bf5a6a31aa8325e33f52783fa123bcc384] <==
	{"level":"info","ts":"2024-09-16T10:48:47.877008Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:48:47.877076Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:47.877156Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.877188Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:47.899347Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:47.899570Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:47.899599Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:47.899714Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:47.899728Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:49.036711Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036850Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036905Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:49.036946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.036978Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037016Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.037063Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 4"}
	{"level":"info","ts":"2024-09-16T10:48:49.040899Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:49.041104Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.042103Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.043175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.043511Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:49.045434Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:49.046389Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:49.045527Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:49.050654Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [5bcfe047e4005e24d6719487f45bde2380924679e0f77e81ce9e05992af73afb] <==
	{"level":"info","ts":"2024-09-16T10:48:02.360883Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360934Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:02.360973Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361006Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361064Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.361099Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:02.364920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-919910 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:02.365163Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.365549Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:02.366285Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.367468Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:02.367535Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.367668Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:02.369323Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:02.370172Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:35.943802Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:35.943843Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:35.943911Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.943938Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945041Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:35.945137Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:35.990678Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:35.995430Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995621Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:35.995642Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-919910","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:51:11 up 10:33,  0 users,  load average: 2.05, 1.47, 1.66
	Linux functional-919910 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [072cecfbf1d3967a28f6cf80f4e3b0bf030253965b58aa0f0089cd01271c49a1] <==
	I0916 10:49:03.038169       1 main.go:299] handling current node
	I0916 10:49:13.040898       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:13.041015       1 main.go:299] handling current node
	I0916 10:49:23.042091       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:23.042126       1 main.go:299] handling current node
	I0916 10:49:33.044840       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:33.044878       1 main.go:299] handling current node
	I0916 10:49:43.040749       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:43.040855       1 main.go:299] handling current node
	I0916 10:49:53.038520       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:53.038552       1 main.go:299] handling current node
	I0916 10:50:03.038499       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:03.038621       1 main.go:299] handling current node
	I0916 10:50:13.047462       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:13.047501       1 main.go:299] handling current node
	I0916 10:50:23.044819       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:23.044854       1 main.go:299] handling current node
	I0916 10:50:33.037721       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:33.037760       1 main.go:299] handling current node
	I0916 10:50:43.043131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:43.043172       1 main.go:299] handling current node
	I0916 10:50:53.038035       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:53.038069       1 main.go:299] handling current node
	I0916 10:51:03.037909       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:51:03.037958       1 main.go:299] handling current node
	
	
	==> kindnet [e8aeda4b55bc63f93934a2cc0bed0950a05df3db193d9ed2e77a2dc96b78ec18] <==
	I0916 10:48:01.143502       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:48:01.143730       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:48:01.143864       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:48:01.143886       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:48:01.143900       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:48:01.489821       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:48:01.489995       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:48:01.490034       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:48:05.492976       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:05.493097       1 metrics.go:61] Registering metrics
	I0916 10:48:05.493204       1 controller.go:374] Syncing nftables rules
	I0916 10:48:11.486739       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:11.486849       1 main.go:299] handling current node
	I0916 10:48:21.485705       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:21.485797       1 main.go:299] handling current node
	I0916 10:48:31.492826       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:31.492896       1 main.go:299] handling current node
	
	
	==> kube-apiserver [12f0a29c7ca2a4856dd6155d0190d0e3d79e019e8dce0bf7fd4c991c81d14bc5] <==
	I0916 10:48:51.517673       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:51.517813       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:51.520522       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:51.532063       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:51.532210       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:51.532286       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:51.532417       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:51.534946       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:51.535625       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:51.536172       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:51.536265       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:51.536300       1 cache.go:39] Caches are synced for autoregister controller
	E0916 10:48:51.552940       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 10:48:52.288516       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:48:53.633170       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:53.763564       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:53.775869       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:53.843592       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:53.851287       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:49:10.096044       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:03.868011       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:51:03.979311       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:04.465450       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/kubernetes-dashboard" clusterIPs={"IPv4":"10.99.2.180"}
	I0916 10:51:04.508474       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:04.592851       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/dashboard-metrics-scraper" clusterIPs={"IPv4":"10.97.225.105"}
	
	
	==> kube-controller-manager [8f5620673b4ff5c0c99db71dd02fc2ce9baec6c9b22460cbdf86d411abc6a715] <==
	I0916 10:48:08.534308       1 shared_informer.go:320] Caches are synced for node
	I0916 10:48:08.534378       1 range_allocator.go:171] "Sending events to api server" logger="node-ipam-controller"
	I0916 10:48:08.534401       1 range_allocator.go:177] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0916 10:48:08.534407       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0916 10:48:08.534412       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0916 10:48:08.534494       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	I0916 10:48:08.535941       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:08.543605       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:08.549019       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:08.554402       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:08.559718       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:08.572151       1 shared_informer.go:320] Caches are synced for GC
	I0916 10:48:08.573409       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 10:48:08.615906       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:08.623850       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:08.625786       1 shared_informer.go:320] Caches are synced for taint
	I0916 10:48:08.625881       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 10:48:08.625973       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="functional-919910"
	I0916 10:48:08.626024       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 10:48:08.681295       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:08.695472       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:09.103907       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:09.103941       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:09.123641       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:33.396647       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-919910"
	
	
	==> kube-controller-manager [d69895ce029aea3aacc9c117ed64c274077ed21cefa739082ee00be46e903809] <==
	I0916 10:48:55.444362       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465215       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:55.465250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:51:04.115554       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="128.900629ms"
	E0916 10:51:04.115599       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.115667       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="66.936963ms"
	E0916 10:51:04.115680       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.161490       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="40.899792ms"
	E0916 10:51:04.161538       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.165981       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="46.213986ms"
	E0916 10:51:04.166028       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.182575       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="14.905721ms"
	E0916 10:51:04.182622       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.182674       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="17.528022ms"
	E0916 10:51:04.182697       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:04.240501       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="56.487547ms"
	I0916 10:51:04.275668       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="91.695454ms"
	I0916 10:51:04.293566       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="50.22195ms"
	I0916 10:51:04.297862       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="48.524µs"
	I0916 10:51:04.299265       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="34.904µs"
	I0916 10:51:04.320393       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="44.594287ms"
	I0916 10:51:04.323280       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="87.555µs"
	I0916 10:51:04.382448       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="54.603µs"
	I0916 10:51:07.501824       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="38.929076ms"
	I0916 10:51:07.502016       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="74.197µs"
	
	
	==> kube-proxy [68f543d941434df90f12c922b0b45dcb557a7b8316bd36d083123f6f29e0f3d7] <==
	I0916 10:48:03.731423       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:05.433154       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:05.488916       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:06.322385       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:06.341489       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:06.355073       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:06.355531       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:06.357367       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.358630       1 config.go:199] "Starting service config controller"
	I0916 10:48:06.358729       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:06.358801       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:06.358840       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:06.360984       1 config.go:328] "Starting node config controller"
	I0916 10:48:06.361059       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:06.462180       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:06.462239       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:06.464940       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [6f8c0a2f9d3e9fe72768e28685deb8e30624ac7b3cfaa272ac69f57b771050db] <==
	I0916 10:48:52.781215       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:48:52.872969       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:52.873137       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:52.892040       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:52.892101       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:52.893967       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:52.894261       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:52.894296       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:52.896485       1 config.go:199] "Starting service config controller"
	I0916 10:48:52.896530       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:52.898078       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:52.898096       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:52.899995       1 config.go:328] "Starting node config controller"
	I0916 10:48:52.900022       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:52.998445       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:52.998473       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:53.000890       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7fcb94c0bce841ce6b01965b0d7eaeedcf47449b34b9a524c16d4f0580db9e76] <==
	I0916 10:48:50.310915       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:51.321329       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:51.321447       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:51.321484       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:51.321527       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:51.482161       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:51.488747       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:51.491214       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:51.491627       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:51.497428       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:51.491653       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:51.597727       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [9a35fb982442f2ef08963a8588b112f704124f0fecc14cbfc199e94d6085db98] <==
	I0916 10:48:04.872300       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:48:06.573495       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:06.573525       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:06.588423       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:06.588642       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:06.588658       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:48:06.588698       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:06.588607       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:48:06.592031       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:48:06.591278       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.591687       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:06.696997       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:06.697079       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:48:06.697269       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:48:35.945051       1 tlsconfig.go:258] "Shutting down DynamicServingCertificateController"
	I0916 10:48:35.945270       1 secure_serving.go:258] Stopped listening on 127.0.0.1:10259
	E0916 10:48:35.945400       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158435    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:49:57 functional-919910 kubelet[4906]: E0916 10:49:57.158481    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483797158278822,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159871    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:07 functional-919910 kubelet[4906]: E0916 10:50:07.159920    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483807159616730,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161814    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:17 functional-919910 kubelet[4906]: E0916 10:50:17.161855    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483817161563233,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163614    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:27 functional-919910 kubelet[4906]: E0916 10:50:27.163651    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483827163296450,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164729    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:37 functional-919910 kubelet[4906]: E0916 10:50:37.164762    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483837164469074,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166381    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:47 functional-919910 kubelet[4906]: E0916 10:50:47.166973    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483847166139430,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168441    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:50:57 functional-919910 kubelet[4906]: E0916 10:50:57.168483    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483857168020609,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:155470,},InodesUsed:&UInt64Value{Value:77,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: E0916 10:51:04.228929    4906 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.228999    4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: E0916 10:51:04.258970    4906 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.259033    4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8a6ba31c18f33c5660170029e5cde1" containerName="kube-apiserver"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.294906    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4tsh\" (UniqueName: \"kubernetes.io/projected/067210bf-0875-4a9d-85ee-79032a148043-kube-api-access-g4tsh\") pod \"kubernetes-dashboard-695b96c756-tfx57\" (UID: \"067210bf-0875-4a9d-85ee-79032a148043\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.294960    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/067210bf-0875-4a9d-85ee-79032a148043-tmp-volume\") pod \"kubernetes-dashboard-695b96c756-tfx57\" (UID: \"067210bf-0875-4a9d-85ee-79032a148043\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-tfx57"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.395817    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcgw\" (UniqueName: \"kubernetes.io/projected/e413c9cc-49a4-456c-81e0-7f2e23692d08-kube-api-access-gdcgw\") pod \"dashboard-metrics-scraper-c5db448b4-qqx24\" (UID: \"e413c9cc-49a4-456c-81e0-7f2e23692d08\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.395884    4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/e413c9cc-49a4-456c-81e0-7f2e23692d08-tmp-volume\") pod \"dashboard-metrics-scraper-c5db448b4-qqx24\" (UID: \"e413c9cc-49a4-456c-81e0-7f2e23692d08\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-qqx24"
	Sep 16 10:51:04 functional-919910 kubelet[4906]: I0916 10:51:04.433755    4906 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:51:07 functional-919910 kubelet[4906]: E0916 10:51:07.171520    4906 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483867171044384,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:165956,},InodesUsed:&UInt64Value{Value:83,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:51:07 functional-919910 kubelet[4906]: E0916 10:51:07.171561    4906 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726483867171044384,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:165956,},InodesUsed:&UInt64Value{Value:83,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
	
	==> storage-provisioner [0318f459801da15bd2e19f5a98b73c1156fff994dcdda61e57a57ddf9e92ccee] <==
	I0916 10:48:52.562259       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:52.664564       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:52.664725       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:10.099884       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:10.100345       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	I0916 10:49:10.101540       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"609", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9 became leader
	I0916 10:49:10.201234       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_e4b0a145-d435-49cb-bddf-cb4e7bf200d9!
	
	
	==> storage-provisioner [67f50b0e25dae16dbad275ffac3a734fe571c8f8cb91d485eaac44783eb641be] <==
	I0916 10:48:01.486119       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:48:05.527187       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:48:05.529539       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:48:22.958175       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:48:22.959300       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	I0916 10:48:22.959068       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"a458447e-2e14-46d1-bc5f-e9228298bb58", APIVersion:"v1", ResourceVersion:"511", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-919910_be96807a-e73b-444f-98b3-646320e9e90e became leader
	I0916 10:48:23.061550       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-919910_be96807a-e73b-444f-98b3-646320e9e90e!
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-919910 -n functional-919910
helpers_test.go:261: (dbg) Run:  kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (8.40676ms)
helpers_test.go:263: kubectl --context functional-919910 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/NodeLabels (4.24s)
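Every kubectl invocation in this run fails with "fork/exec /usr/local/bin/kubectl: exec format error". On Linux that error means the kernel cannot execute the binary at all; on an arm64 runner it typically indicates that an x86_64 kubectl was installed at that path. A minimal Go sketch (illustrative, not part of the test suite) that confirms such a mismatch by reading the ELF header of the same binary the tests invoke:

	package main

	import (
		"debug/elf"
		"fmt"
		"runtime"
	)

	func main() {
		// Open the binary the failing tests invoke and read its ELF header.
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			fmt.Println("open:", err)
			return
		}
		defer f.Close()
		// On this arm64 host the two should agree (EM_AARCH64 / arm64);
		// EM_X86_64 here would explain the exec format error.
		fmt.Printf("binary machine: %v, host arch: %s\n", f.Machine, runtime.GOARCH)
	}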

TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup
functional_test_tunnel_test.go:212: (dbg) Run:  kubectl --context functional-919910 apply -f testdata/testsvc.yaml
functional_test_tunnel_test.go:212: (dbg) Non-zero exit: kubectl --context functional-919910 apply -f testdata/testsvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (2.311268ms)
functional_test_tunnel_test.go:214: kubectl --context functional-919910 apply -f testdata/testsvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (0.00s)

TestFunctional/parallel/TunnelCmd/serial/AccessDirect (92.32s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:288: failed to hit nginx at "http://": Temporary Error: Get "http:": http: no Host in request URL
functional_test_tunnel_test.go:290: (dbg) Run:  kubectl --context functional-919910 get svc nginx-svc
functional_test_tunnel_test.go:290: (dbg) Non-zero exit: kubectl --context functional-919910 get svc nginx-svc: fork/exec /usr/local/bin/kubectl: exec format error (581.368µs)
functional_test_tunnel_test.go:292: kubectl --context functional-919910 get svc nginx-svc failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test_tunnel_test.go:294: failed to kubectl get svc nginx-svc:
functional_test_tunnel_test.go:301: expected body to contain "Welcome to nginx!", but got *""*
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (92.32s)
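The "no Host in request URL" failure above is a direct consequence of the empty service address: with no IP, the test built the URL "http://", which net/http rejects before any connection is attempted. A short demonstration of that failure mode:

	package main

	import (
		"fmt"
		"net/http"
	)

	func main() {
		// An empty host collapses the URL to "http:"; the request is
		// rejected client-side, exactly as in the log above.
		_, err := http.Get("http://")
		fmt.Println(err) // Get "http:": http: no Host in request URL
	}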

TestFunctional/parallel/ServiceCmd/DeployApp (0s)

=== RUN   TestFunctional/parallel/ServiceCmd/DeployApp
functional_test.go:1437: (dbg) Run:  kubectl --context functional-919910 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1437: (dbg) Non-zero exit: kubectl --context functional-919910 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8: fork/exec /usr/local/bin/kubectl: exec format error (625.797µs)
functional_test.go:1443: failed to create hello-node deployment with this command "kubectl --context functional-919910 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8": fork/exec /usr/local/bin/kubectl: exec format error.
--- FAIL: TestFunctional/parallel/ServiceCmd/DeployApp (0.00s)
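Because this create step never executed, no hello-node deployment or service exists, and the ServiceCmd subtests below fail in cascade with SVC_NOT_FOUND. For reference, a sketch (not from the suite) that drives the same command via os/exec; on this runner the error surfaces before Kubernetes is contacted, since the kubectl binary itself cannot be executed:

	package main

	import (
		"fmt"
		"os/exec"
	)

	func main() {
		// The exact command the test attempted.
		cmd := exec.Command("kubectl", "--context", "functional-919910",
			"create", "deployment", "hello-node",
			"--image=registry.k8s.io/echoserver-arm:1.8")
		out, err := cmd.CombinedOutput()
		fmt.Println(string(out))
		fmt.Println(err) // fork/exec /usr/local/bin/kubectl: exec format error
	}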

TestFunctional/parallel/ServiceCmd/List (0.34s)

=== RUN   TestFunctional/parallel/ServiceCmd/List
functional_test.go:1459: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 service list
functional_test.go:1464: expected 'service list' to contain *hello-node* but got:
	|-------------|------------|--------------|-----|
	|  NAMESPACE  |    NAME    | TARGET PORT  | URL |
	|-------------|------------|--------------|-----|
	| default     | kubernetes | No node port |     |
	| kube-system | kube-dns   | No node port |     |
	|-------------|------------|--------------|-----|
--- FAIL: TestFunctional/parallel/ServiceCmd/List (0.34s)

TestFunctional/parallel/ServiceCmd/JSONOutput (0.33s)

=== RUN   TestFunctional/parallel/ServiceCmd/JSONOutput
functional_test.go:1489: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 service list -o json
functional_test.go:1494: Took "330.581384ms" to run "out/minikube-linux-arm64 -p functional-919910 service list -o json"
functional_test.go:1498: expected the json of 'service list' to include "hello-node" but got *"[{\"Namespace\":\"default\",\"Name\":\"kubernetes\",\"URLs\":[],\"PortNames\":[\"No node port\"]},{\"Namespace\":\"kube-system\",\"Name\":\"kube-dns\",\"URLs\":[],\"PortNames\":[\"No node port\"]}]"*. args: "out/minikube-linux-arm64 -p functional-919910 service list -o json"
--- FAIL: TestFunctional/parallel/ServiceCmd/JSONOutput (0.33s)
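The JSON above lists only the default kubernetes and kube-dns services, confirming the missing deployment. A minimal sketch that decodes that output and checks for the expected entry; the svc struct mirrors the keys visible in the log, so its field types are inferred rather than taken from minikube's source:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// Field names mirror the JSON keys in the logged output.
	type svc struct {
		Namespace string
		Name      string
		URLs      []string
		PortNames []string
	}

	func main() {
		raw := []byte(`[{"Namespace":"default","Name":"kubernetes","URLs":[],"PortNames":["No node port"]},{"Namespace":"kube-system","Name":"kube-dns","URLs":[],"PortNames":["No node port"]}]`)
		var svcs []svc
		if err := json.Unmarshal(raw, &svcs); err != nil {
			panic(err)
		}
		found := false
		for _, s := range svcs {
			found = found || s.Name == "hello-node"
		}
		fmt.Println("hello-node present:", found) // false: the deployment was never created
	}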

TestFunctional/parallel/ServiceCmd/HTTPS (0.33s)

=== RUN   TestFunctional/parallel/ServiceCmd/HTTPS
functional_test.go:1509: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 service --namespace=default --https --url hello-node
functional_test.go:1509: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 service --namespace=default --https --url hello-node: exit status 115 (334.789375ms)

-- stdout --
	
	

-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

** /stderr **
functional_test.go:1511: failed to get service url. args "out/minikube-linux-arm64 -p functional-919910 service --namespace=default --https --url hello-node" : exit status 115
--- FAIL: TestFunctional/parallel/ServiceCmd/HTTPS (0.33s)

TestFunctional/parallel/ServiceCmd/Format (0.33s)

=== RUN   TestFunctional/parallel/ServiceCmd/Format
functional_test.go:1540: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 service hello-node --url --format={{.IP}}
functional_test.go:1540: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 service hello-node --url --format={{.IP}}: exit status 115 (331.239615ms)

-- stdout --
	
	

-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

** /stderr **
functional_test.go:1542: failed to get service url with custom format. args "out/minikube-linux-arm64 -p functional-919910 service hello-node --url --format={{.IP}}": exit status 115
functional_test.go:1548: "" is not a valid IP
--- FAIL: TestFunctional/parallel/ServiceCmd/Format (0.33s)

TestFunctional/parallel/ServiceCmd/URL (0.33s)

=== RUN   TestFunctional/parallel/ServiceCmd/URL
functional_test.go:1559: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 service hello-node --url
functional_test.go:1559: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 service hello-node --url: exit status 115 (334.262955ms)

-- stdout --
	
	

-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

** /stderr **
functional_test.go:1561: failed to get service url. args: "out/minikube-linux-arm64 -p functional-919910 service hello-node --url": exit status 115
functional_test.go:1565: found endpoint for hello-node: 
functional_test.go:1573: expected scheme to be -"http"- got scheme: *""*
--- FAIL: TestFunctional/parallel/ServiceCmd/URL (0.33s)

TestFunctional/parallel/MountCmd/any-port (3.36s)

=== RUN   TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:73: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdany-port2927168651/001:/mount-9p --alsologtostderr -v=1]
functional_test_mount_test.go:107: wrote "test-1726483858215687711" to /tmp/TestFunctionalparallelMountCmdany-port2927168651/001/created-by-test
functional_test_mount_test.go:107: wrote "test-1726483858215687711" to /tmp/TestFunctionalparallelMountCmdany-port2927168651/001/created-by-test-removed-by-pod
functional_test_mount_test.go:107: wrote "test-1726483858215687711" to /tmp/TestFunctionalparallelMountCmdany-port2927168651/001/test-1726483858215687711
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:115: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (452.341853ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:129: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh -- ls -la /mount-9p
functional_test_mount_test.go:133: guest mount directory contents
total 2
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test-removed-by-pod
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 test-1726483858215687711
functional_test_mount_test.go:137: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh cat /mount-9p/test-1726483858215687711
functional_test_mount_test.go:148: (dbg) Run:  kubectl --context functional-919910 replace --force -f testdata/busybox-mount-test.yaml
functional_test_mount_test.go:148: (dbg) Non-zero exit: kubectl --context functional-919910 replace --force -f testdata/busybox-mount-test.yaml: fork/exec /usr/local/bin/kubectl: exec format error (585.749µs)
functional_test_mount_test.go:150: failed to 'kubectl replace' for busybox-mount-test. args "kubectl --context functional-919910 replace --force -f testdata/busybox-mount-test.yaml" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test_mount_test.go:80: "TestFunctional/parallel/MountCmd/any-port" failed, getting debug info...
functional_test_mount_test.go:81: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates"
functional_test_mount_test.go:81: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates": exit status 1 (356.920834ms)

-- stdout --
	192.168.49.1 on /mount-9p type 9p (rw,relatime,sync,dirsync,dfltuid=1000,dfltgid=999,access=any,msize=262144,trans=tcp,noextend,port=39103)
	total 2
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test-removed-by-pod
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 test-1726483858215687711
	cat: /mount-9p/pod-dates: No such file or directory

-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test_mount_test.go:83: debugging command "out/minikube-linux-arm64 -p functional-919910 ssh \"mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates\"" failed : exit status 1
functional_test_mount_test.go:90: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:94: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdany-port2927168651/001:/mount-9p --alsologtostderr -v=1] ...
functional_test_mount_test.go:94: (dbg) [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdany-port2927168651/001:/mount-9p --alsologtostderr -v=1] stdout:
* Mounting host path /tmp/TestFunctionalparallelMountCmdany-port2927168651/001 into VM as /mount-9p ...
- Mount type:   9p
- User ID:      docker
- Group ID:     docker
- Version:      9p2000.L
- Message Size: 262144
- Options:      map[]
- Bind Address: 192.168.49.1:39103
* Userspace file server: ufs starting
* Successfully mounted /tmp/TestFunctionalparallelMountCmdany-port2927168651/001 to /mount-9p

* NOTE: This process must stay alive for the mount to be accessible ...
* Unmounting /mount-9p ...

functional_test_mount_test.go:94: (dbg) [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdany-port2927168651/001:/mount-9p --alsologtostderr -v=1] stderr:
I0916 10:50:58.323109 1410098 out.go:345] Setting OutFile to fd 1 ...
I0916 10:50:58.323362 1410098 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:58.323383 1410098 out.go:358] Setting ErrFile to fd 2...
I0916 10:50:58.323398 1410098 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:58.323738 1410098 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:50:58.324103 1410098 mustload.go:65] Loading cluster: functional-919910
I0916 10:50:58.324590 1410098 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:50:58.325254 1410098 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:50:58.365977 1410098 host.go:66] Checking if "functional-919910" exists ...
I0916 10:50:58.366333 1410098 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0916 10:50:58.515261 1410098 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:50:58.502295979 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
I0916 10:50:58.515432 1410098 cli_runner.go:164] Run: docker network inspect functional-919910 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I0916 10:50:58.541673 1410098 out.go:177] * Mounting host path /tmp/TestFunctionalparallelMountCmdany-port2927168651/001 into VM as /mount-9p ...
I0916 10:50:58.544703 1410098 out.go:177]   - Mount type:   9p
I0916 10:50:58.548273 1410098 out.go:177]   - User ID:      docker
I0916 10:50:58.550815 1410098 out.go:177]   - Group ID:     docker
I0916 10:50:58.553304 1410098 out.go:177]   - Version:      9p2000.L
I0916 10:50:58.555800 1410098 out.go:177]   - Message Size: 262144
I0916 10:50:58.558498 1410098 out.go:177]   - Options:      map[]
I0916 10:50:58.561082 1410098 out.go:177]   - Bind Address: 192.168.49.1:39103
I0916 10:50:58.563661 1410098 out.go:177] * Userspace file server: 
I0916 10:50:58.564004 1410098 ssh_runner.go:195] Run: /bin/bash -c "[ "x$(findmnt -T /mount-9p | grep /mount-9p)" != "x" ] && sudo umount -f /mount-9p || echo "
I0916 10:50:58.564105 1410098 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:50:58.593656 1410098 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:50:58.715371 1410098 mount.go:180] unmount for /mount-9p ran successfully
I0916 10:50:58.715408 1410098 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /mount-9p"
I0916 10:50:58.726206 1410098 ssh_runner.go:195] Run: /bin/bash -c "sudo mount -t 9p -o dfltgid=$(grep ^docker: /etc/group | cut -d: -f3),dfltuid=$(id -u docker),msize=262144,port=39103,trans=tcp,version=9p2000.L 192.168.49.1 /mount-9p"
I0916 10:50:58.745428 1410098 main.go:125] stdlog: ufs.go:141 connected
I0916 10:50:58.745772 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tversion tag 65535 msize 262144 version '9P2000.L'
I0916 10:50:58.745835 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rversion tag 65535 msize 262144 version '9P2000'
I0916 10:50:58.746066 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tattach tag 0 fid 0 afid 4294967295 uname 'nobody' nuname 0 aname ''
I0916 10:50:58.746130 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rattach tag 0 aqid (3b657a fa750323 'd')
I0916 10:50:58.746436 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 0
I0916 10:50:58.746522 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (3b657a fa750323 'd') m d775 at 0 mt 1726483858 l 4096 t 0 d 0 ext )
I0916 10:50:58.759385 1410098 lock.go:50] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/.mount-process: {Name:mkb63a55e64295b742c42e4beab3a86a34fba39c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0916 10:50:58.760063 1410098 mount.go:105] mount successful: ""
I0916 10:50:58.763267 1410098 out.go:177] * Successfully mounted /tmp/TestFunctionalparallelMountCmdany-port2927168651/001 to /mount-9p
I0916 10:50:58.765788 1410098 out.go:201] 
I0916 10:50:58.768417 1410098 out.go:177] * NOTE: This process must stay alive for the mount to be accessible ...
I0916 10:51:00.210327 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 0
I0916 10:51:00.210430 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (3b657a fa750323 'd') m d775 at 0 mt 1726483858 l 4096 t 0 d 0 ext )
I0916 10:51:00.210902 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 1 
I0916 10:51:00.210945 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 
I0916 10:51:00.211131 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Topen tag 0 fid 1 mode 0
I0916 10:51:00.211212 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Ropen tag 0 qid (3b657a fa750323 'd') iounit 0
I0916 10:51:00.211362 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 0
I0916 10:51:00.211401 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (3b657a fa750323 'd') m d775 at 0 mt 1726483858 l 4096 t 0 d 0 ext )
I0916 10:51:00.211575 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 0 count 262120
I0916 10:51:00.211716 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 258
I0916 10:51:00.211854 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 261862
I0916 10:51:00.211896 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:00.212016 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:51:00.212048 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:00.212182 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'created-by-test' 
I0916 10:51:00.212215 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657b fa750323 '') 
I0916 10:51:00.212336 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.212374 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (3b657b fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.212496 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.212531 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (3b657b fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.212731 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:00.212779 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:00.212931 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'test-1726483858215687711' 
I0916 10:51:00.212983 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657d fa750323 '') 
I0916 10:51:00.213101 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.213157 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.213271 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.213304 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.213430 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:00.213452 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:00.213570 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'created-by-test-removed-by-pod' 
I0916 10:51:00.213612 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657c fa750323 '') 
I0916 10:51:00.213748 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.213781 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (3b657c fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.220281 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:00.220360 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (3b657c fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.220608 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:00.220656 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:00.220832 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:51:00.221068 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:00.221298 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 1
I0916 10:51:00.221333 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:00.725199 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 1 0:'test-1726483858215687711' 
I0916 10:51:00.725314 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657d fa750323 '') 
I0916 10:51:00.725463 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 1
I0916 10:51:00.725522 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.725651 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 1 newfid 2 
I0916 10:51:00.725686 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 
I0916 10:51:00.725783 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Topen tag 0 fid 2 mode 0
I0916 10:51:00.725858 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Ropen tag 0 qid (3b657d fa750323 '') iounit 0
I0916 10:51:00.725975 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 1
I0916 10:51:00.726017 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:00.726139 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 2 offset 0 count 262120
I0916 10:51:00.726201 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 24
I0916 10:51:00.726319 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 2 offset 24 count 262120
I0916 10:51:00.726361 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:00.726484 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 2 offset 24 count 262120
I0916 10:51:00.726534 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:00.726747 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:00.726781 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:00.726901 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 1
I0916 10:51:00.726927 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.081393 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 0
I0916 10:51:01.081475 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (3b657a fa750323 'd') m d775 at 0 mt 1726483858 l 4096 t 0 d 0 ext )
I0916 10:51:01.081825 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 1 
I0916 10:51:01.081863 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 
I0916 10:51:01.081966 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Topen tag 0 fid 1 mode 0
I0916 10:51:01.082037 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Ropen tag 0 qid (3b657a fa750323 'd') iounit 0
I0916 10:51:01.082208 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 0
I0916 10:51:01.082286 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (3b657a fa750323 'd') m d775 at 0 mt 1726483858 l 4096 t 0 d 0 ext )
I0916 10:51:01.082457 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 0 count 262120
I0916 10:51:01.082594 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 258
I0916 10:51:01.082725 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 261862
I0916 10:51:01.082766 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:01.082862 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:51:01.082914 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:01.083032 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'created-by-test' 
I0916 10:51:01.083087 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657b fa750323 '') 
I0916 10:51:01.083196 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.083250 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (3b657b fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.083358 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.083402 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (3b657b fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.083512 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:01.083547 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.083683 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'test-1726483858215687711' 
I0916 10:51:01.083743 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657d fa750323 '') 
I0916 10:51:01.083852 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.083905 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.084010 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.084067 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('test-1726483858215687711' 'jenkins' 'jenkins' '' q (3b657d fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.084173 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:01.084207 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.084345 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 2 0:'created-by-test-removed-by-pod' 
I0916 10:51:01.084416 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rwalk tag 0 (3b657c fa750323 '') 
I0916 10:51:01.084528 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.084583 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (3b657c fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.084716 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tstat tag 0 fid 2
I0916 10:51:01.084780 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (3b657c fa750323 '') m 644 at 0 mt 1726483858 l 24 t 0 d 0 ext )
I0916 10:51:01.084896 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 2
I0916 10:51:01.084932 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.085043 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:51:01.085101 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rread tag 0 count 0
I0916 10:51:01.085228 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 1
I0916 10:51:01.085280 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.087236 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Twalk tag 0 fid 0 newfid 1 0:'pod-dates' 
I0916 10:51:01.087346 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rerror tag 0 ename 'file not found' ecode 0
I0916 10:51:01.444631 1410098 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:49764 Tclunk tag 0 fid 0
I0916 10:51:01.446977 1410098 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:49764 Rclunk tag 0
I0916 10:51:01.448417 1410098 main.go:125] stdlog: ufs.go:147 disconnected
I0916 10:51:01.470392 1410098 out.go:177] * Unmounting /mount-9p ...
I0916 10:51:01.472542 1410098 ssh_runner.go:195] Run: /bin/bash -c "[ "x$(findmnt -T /mount-9p | grep /mount-9p)" != "x" ] && sudo umount -f /mount-9p || echo "
I0916 10:51:01.481648 1410098 mount.go:180] unmount for /mount-9p ran successfully
I0916 10:51:01.481767 1410098 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/.mount-process: {Name:mkb63a55e64295b742c42e4beab3a86a34fba39c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0916 10:51:01.484786 1410098 out.go:201] 
W0916 10:51:01.487843 1410098 out.go:270] X Exiting due to MK_INTERRUPTED: Received terminated signal
X Exiting due to MK_INTERRUPTED: Received terminated signal
I0916 10:51:01.490174 1410098 out.go:201] 
--- FAIL: TestFunctional/parallel/MountCmd/any-port (3.36s)
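Note that the 9p mount itself succeeded: the host-side test files are visible in the guest, and the Twalk for 'pod-dates' above correctly returns 'file not found' because the busybox-mount-test pod that would have written it was never created (the same kubectl failure). A minimal guest-side sketch of the check, assuming the /mount-9p mount point from the log:

	package main

	import (
		"fmt"
		"os"
	)

	func main() {
		// Files written by the host-side harness, plus the one the busybox
		// pod would have written had it ever been scheduled.
		for _, p := range []string{
			"/mount-9p/created-by-test",
			"/mount-9p/test-1726483858215687711",
			"/mount-9p/pod-dates", // missing: the pod never ran
		} {
			_, err := os.Stat(p)
			fmt.Printf("%s exists: %v\n", p, err == nil)
		}
	}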

TestMultiControlPlane/serial/NodeLabels (3.15s)

=== RUN   TestMultiControlPlane/serial/NodeLabels
ha_test.go:255: (dbg) Run:  kubectl --context ha-334765 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
ha_test.go:255: (dbg) Non-zero exit: kubectl --context ha-334765 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]": fork/exec /usr/local/bin/kubectl: exec format error (415.932µs)
ha_test.go:257: failed to 'kubectl get nodes' with args "kubectl --context ha-334765 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": fork/exec /usr/local/bin/kubectl: exec format error
ha_test.go:264: failed to decode json from label list: args "kubectl --context ha-334765 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": unexpected end of JSON input
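The second error follows mechanically from the first: kubectl produced no output, and decoding an empty byte slice yields exactly "unexpected end of JSON input". A short demonstration:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	func main() {
		var v any
		err := json.Unmarshal([]byte(""), &v)
		fmt.Println(err) // unexpected end of JSON input
	}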
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/NodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-334765
helpers_test.go:235: (dbg) docker inspect ha-334765:

-- stdout --
	[
	    {
	        "Id": "471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5",
	        "Created": "2024-09-16T10:51:30.912390622Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1415494,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:51:31.080722061Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hostname",
	        "HostsPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hosts",
	        "LogPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5-json.log",
	        "Name": "/ha-334765",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "ha-334765:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-334765",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/merged",
	                "UpperDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/diff",
	                "WorkDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "ha-334765",
	                "Source": "/var/lib/docker/volumes/ha-334765/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-334765",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-334765",
	                "name.minikube.sigs.k8s.io": "ha-334765",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "12d5a1b21703aeee0dc587f61286e8eedd5eadc5d72d885400108c3582ba6858",
	            "SandboxKey": "/var/run/docker/netns/12d5a1b21703",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34618"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34619"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34622"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34620"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34621"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-334765": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "a49e1846148d74f15aa5bd587e5d2d6b8a3c4246e7c45cf081cf9063a160d645",
	                    "EndpointID": "698461d25faa71c3e4175824b7994fac5706f91d7d306412be9930c5e2592d23",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-334765",
	                        "471d2d625f18"
	                    ]
	                }
	            }
	        }
	    }
]

-- /stdout --
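Note: the host ports recorded under NetworkSettings.Ports in the inspect dump above can be read back with the same Go-template query minikube itself issues later in this log; a minimal sketch against this profile's container:

    docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' ha-334765

On this run that prints 34618, the loopback port the SSH provisioner dials below.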
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-334765 -n ha-334765
helpers_test.go:244: <<< TestMultiControlPlane/serial/NodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/NodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 logs -n 25: (1.85668197s)
helpers_test.go:252: TestMultiControlPlane/serial/NodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| image   | functional-919910 image build -t     | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|         | localhost/my-image:functional-919910 |                   |         |         |                     |                     |
	|         | testdata/build --alsologtostderr     |                   |         |         |                     |                     |
	| image   | functional-919910 image ls           | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| delete  | -p functional-919910                 | functional-919910 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| start   | -p ha-334765 --wait=true             | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:54 UTC |
	|         | --memory=2200 --ha                   |                   |         |         |                     |                     |
	|         | -v=7 --alsologtostderr               |                   |         |         |                     |                     |
	|         | --driver=docker                      |                   |         |         |                     |                     |
	|         | --container-runtime=crio             |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- apply -f             | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ./testdata/ha/ha-pod-dns-test.yaml   |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- rollout status       | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | deployment/busybox                   |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- get pods -o          | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- get pods -o          | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mbfkp --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mh2kc --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-tczms --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mbfkp --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mh2kc --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-tczms --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mbfkp -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mh2kc -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-tczms -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- get pods -o          | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mbfkp              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mbfkp -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mh2kc              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-mh2kc -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-tczms              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-334765 -- exec                 | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | busybox-7dff88458-tczms -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| node    | add -p ha-334765 -v=7                | ha-334765         | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                    |                   |         |         |                     |                     |
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
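	(The busybox entries above chain nslookup through awk 'NR==5' and cut -d' ' -f3 to pull the resolved address out of nslookup's output, on the assumption that busybox prints the answer on its fifth line. Reconstructed from the Args column, one full invocation reads:

	    minikube kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"

	The paired ping -c 1 192.168.49.1 entries then confirm each pod can reach the gateway address that name resolves to.)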
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:25
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:25.456983 1415006 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:25.457102 1415006 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:25.457115 1415006 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:25.457121 1415006 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:25.457390 1415006 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:25.457821 1415006 out.go:352] Setting JSON to false
	I0916 10:51:25.458668 1415006 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38031,"bootTime":1726445855,"procs":154,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:25.458740 1415006 start.go:139] virtualization:  
	I0916 10:51:25.462420 1415006 out.go:177] * [ha-334765] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:25.466540 1415006 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:25.466681 1415006 notify.go:220] Checking for updates...
	I0916 10:51:25.472214 1415006 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:25.474945 1415006 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:25.477743 1415006 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:25.480408 1415006 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:25.483253 1415006 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:25.486288 1415006 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:25.516837 1415006 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:25.516993 1415006 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:25.574760 1415006 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:51:25.563814697 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:25.574877 1415006 docker.go:318] overlay module found
	I0916 10:51:25.577720 1415006 out.go:177] * Using the docker driver based on user configuration
	I0916 10:51:25.580504 1415006 start.go:297] selected driver: docker
	I0916 10:51:25.580528 1415006 start.go:901] validating driver "docker" against <nil>
	I0916 10:51:25.580545 1415006 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:25.581296 1415006 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:25.647223 1415006 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:51:25.637994422 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:25.647541 1415006 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:51:25.647914 1415006 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:51:25.650821 1415006 out.go:177] * Using Docker driver with root privileges
	I0916 10:51:25.653558 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:25.653636 1415006 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 10:51:25.653650 1415006 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:51:25.653740 1415006 start.go:340] cluster config:
	{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:25.658763 1415006 out.go:177] * Starting "ha-334765" primary control-plane node in "ha-334765" cluster
	I0916 10:51:25.661650 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:51:25.664257 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:25.666746 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:25.666809 1415006 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:51:25.666820 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:25.666836 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:25.666937 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:25.666948 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:51:25.667345 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:51:25.667377 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json: {Name:mk57f344adf6e8ac17121e88734a44d2f855cf4f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 10:51:25.695836 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:25.695859 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:25.695952 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:25.695986 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:25.695996 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:25.696005 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:25.696010 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:25.697318 1415006 image.go:273] response: 
	I0916 10:51:25.821505 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:25.821546 1415006 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:25.821578 1415006 start.go:360] acquireMachinesLock for ha-334765: {Name:mk63c1424907d32e4e30c00d74a2bae6eec53e1d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:25.821700 1415006 start.go:364] duration metric: took 99.698µs to acquireMachinesLock for "ha-334765"
	I0916 10:51:25.821736 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:51:25.821822 1415006 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:51:25.825119 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:25.825384 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:51:25.825424 1415006 client.go:168] LocalClient.Create starting
	I0916 10:51:25.825513 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:51:25.825559 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:25.825580 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:25.825637 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:51:25.825660 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:25.825671 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:25.826062 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:51:25.841603 1415006 cli_runner.go:211] docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:51:25.841687 1415006 network_create.go:284] running [docker network inspect ha-334765] to gather additional debugging logs...
	I0916 10:51:25.841709 1415006 cli_runner.go:164] Run: docker network inspect ha-334765
	W0916 10:51:25.856558 1415006 cli_runner.go:211] docker network inspect ha-334765 returned with exit code 1
	I0916 10:51:25.856595 1415006 network_create.go:287] error running [docker network inspect ha-334765]: docker network inspect ha-334765: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network ha-334765 not found
	I0916 10:51:25.856620 1415006 network_create.go:289] output of [docker network inspect ha-334765]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network ha-334765 not found
	
	** /stderr **
	I0916 10:51:25.856747 1415006 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:25.873027 1415006 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400181cbc0}
	I0916 10:51:25.873072 1415006 network_create.go:124] attempt to create docker network ha-334765 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:51:25.873140 1415006 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=ha-334765 ha-334765
	I0916 10:51:25.941986 1415006 network_create.go:108] docker network ha-334765 192.168.49.0/24 created
	I0916 10:51:25.942025 1415006 kic.go:121] calculated static IP "192.168.49.2" for the "ha-334765" container
	I0916 10:51:25.942101 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:25.957289 1415006 cli_runner.go:164] Run: docker volume create ha-334765 --label name.minikube.sigs.k8s.io=ha-334765 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:25.973680 1415006 oci.go:103] Successfully created a docker volume ha-334765
	I0916 10:51:25.973783 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765 --entrypoint /usr/bin/test -v ha-334765:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:26.603240 1415006 oci.go:107] Successfully prepared a docker volume ha-334765
	I0916 10:51:26.603299 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:26.603320 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:26.603404 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:30.841466 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.238008699s)
	I0916 10:51:30.841504 1415006 kic.go:203] duration metric: took 4.238180437s to extract preloaded images to volume ...
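	(The two helper containers above implement minikube's volume preload: the first, run with --entrypoint /usr/bin/test -d /var/lib, exists only to force creation of the ha-334765 volume and verify it mounts, while the second bind-mounts the preloaded image tarball read-only and extracts it into the volume with tar -I lz4 -xf, so the node container that starts next boots with /var already populated.)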
	W0916 10:51:30.841642 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:30.841800 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:30.897286 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765 --name ha-334765 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765 --network ha-334765 --ip 192.168.49.2 --volume ha-334765:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
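	(Each --publish=127.0.0.1::<port> flag in the docker run line above leaves the host-port field empty, so Docker binds an ephemeral loopback port for each container port; that is why HostConfig.PortBindings in the earlier inspect dump shows empty HostPort values while NetworkSettings.Ports shows the assigned ones. A quick way to resolve a mapping, matching the inspect output above:

	    docker port ha-334765 8443
	    # 127.0.0.1:34621
	)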
	I0916 10:51:31.252747 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Running}}
	I0916 10:51:31.275416 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:31.296888 1415006 cli_runner.go:164] Run: docker exec ha-334765 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:31.359548 1415006 oci.go:144] the created container "ha-334765" has a running status.
	I0916 10:51:31.359577 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa...
	I0916 10:51:32.562429 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:32.562482 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:32.581119 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:32.597670 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:32.597695 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:32.646661 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:32.667372 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:32.667472 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:32.686050 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:32.686336 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:32.686346 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:32.825950 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:51:32.825982 1415006 ubuntu.go:169] provisioning hostname "ha-334765"
	I0916 10:51:32.826109 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:32.843087 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:32.843385 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:32.843403 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765 && echo "ha-334765" | sudo tee /etc/hostname
	I0916 10:51:32.992997 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:51:32.993079 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.015722 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:33.015997 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:33.016017 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:33.157078 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:51:33.157107 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:51:33.157127 1415006 ubuntu.go:177] setting up certificates
	I0916 10:51:33.157138 1415006 provision.go:84] configureAuth start
	I0916 10:51:33.157201 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:33.176122 1415006 provision.go:143] copyHostCerts
	I0916 10:51:33.176171 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:51:33.176210 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:51:33.176223 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:51:33.176305 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:51:33.176404 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:51:33.176430 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:51:33.176438 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:51:33.176469 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:51:33.176521 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:51:33.176541 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:51:33.176555 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:51:33.176586 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:51:33.176750 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765 san=[127.0.0.1 192.168.49.2 ha-334765 localhost minikube]
	I0916 10:51:33.387204 1415006 provision.go:177] copyRemoteCerts
	I0916 10:51:33.387279 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:33.387325 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.404017 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:33.501800 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:33.501869 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1196 bytes)
	I0916 10:51:33.527570 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:33.527639 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:51:33.552418 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:33.552488 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:51:33.577987 1415006 provision.go:87] duration metric: took 420.824053ms to configureAuth
	I0916 10:51:33.578013 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:33.578211 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:33.578321 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.594979 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:33.595231 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:33.595250 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:51:33.834727 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:51:33.834750 1415006 machine.go:96] duration metric: took 1.167359262s to provisionDockerMachine
	I0916 10:51:33.834761 1415006 client.go:171] duration metric: took 8.009325433s to LocalClient.Create
	I0916 10:51:33.834773 1415006 start.go:167] duration metric: took 8.009392033s to libmachine.API.Create "ha-334765"
	I0916 10:51:33.834781 1415006 start.go:293] postStartSetup for "ha-334765" (driver="docker")
	I0916 10:51:33.834792 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:33.834877 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:33.834923 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.854871 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:33.956078 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:51:33.959774 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:51:33.959810 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:51:33.959840 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:51:33.959852 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:51:33.959863 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:51:33.959942 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:51:33.960046 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:51:33.960058 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:51:33.960175 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:51:33.969263 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:51:33.994141 1415006 start.go:296] duration metric: took 159.34391ms for postStartSetup
	I0916 10:51:33.994530 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:34.017741 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:51:34.018093 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:51:34.018149 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.037405 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.129485 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:51:34.134543 1415006 start.go:128] duration metric: took 8.31270074s to createHost
	I0916 10:51:34.134567 1415006 start.go:83] releasing machines lock for "ha-334765", held for 8.312850874s
	I0916 10:51:34.134646 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:34.155746 1415006 ssh_runner.go:195] Run: cat /version.json
	I0916 10:51:34.155802 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.156041 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:51:34.156130 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.182433 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.183685 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.401806 1415006 ssh_runner.go:195] Run: systemctl --version
	I0916 10:51:34.406056 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:51:34.551646 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:51:34.556053 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:34.578073 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:51:34.578159 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:34.617437 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:51:34.617461 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:51:34.617511 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:51:34.617570 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:51:34.635159 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:51:34.647920 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:51:34.648006 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:51:34.663065 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:51:34.678927 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:51:34.776926 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:51:34.882717 1415006 docker.go:233] disabling docker service ...
	I0916 10:51:34.882788 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:51:34.904617 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:51:34.918027 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:51:35.022662 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:51:35.132381 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:51:35.146125 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:51:35.165336 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:51:35.165438 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.176174 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:51:35.176279 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.186819 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.197810 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.207939 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:51:35.217303 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.227306 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.244795 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
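Note: the sed invocations above patch /etc/crio/crio.conf.d/02-crio.conf in place: pin the pause image, switch the cgroup manager to cgroupfs, force conmon into the pod cgroup, and open unprivileged ports via default_sysctls. A sketch of the first substitutions as a pure string transform, assuming the TOML stays line-oriented; patchCrioConf is a hypothetical helper, not minikube's code:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// patchCrioConf applies the same edits the sed commands in the log
// perform: pin the pause image and force the cgroupfs cgroup manager.
func patchCrioConf(conf string) string {
	pause := regexp.MustCompile(`(?m)^.*pause_image = .*$`)
	cgroup := regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`)
	conf = pause.ReplaceAllString(conf, `pause_image = "registry.k8s.io/pause:3.10"`)
	conf = cgroup.ReplaceAllString(conf, `cgroup_manager = "cgroupfs"`)
	// conmon must live in the pod cgroup when cgroupfs is the manager.
	if !strings.Contains(conf, "conmon_cgroup") {
		conf = strings.Replace(conf,
			`cgroup_manager = "cgroupfs"`,
			"cgroup_manager = \"cgroupfs\"\nconmon_cgroup = \"pod\"", 1)
	}
	return conf
}

func main() {
	in := "pause_image = \"old\"\ncgroup_manager = \"systemd\"\n"
	fmt.Print(patchCrioConf(in))
}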
	I0916 10:51:35.255247 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:51:35.264668 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:51:35.273525 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:35.365962 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:51:35.482948 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:51:35.483024 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:51:35.487409 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:51:35.487488 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:51:35.490997 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:51:35.530436 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
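Note: `crictl version` prints the key/value block above; minikube only needs the runtime name and version out of it. A small sketch of parsing that output (parseCrictlVersion is an illustrative name; the sample string is the exact output from the log):

package main

import (
	"bufio"
	"fmt"
	"strings"
)

// parseCrictlVersion splits the "Key:  value" lines emitted by
// `crictl version` into a map.
func parseCrictlVersion(out string) map[string]string {
	fields := map[string]string{}
	sc := bufio.NewScanner(strings.NewReader(out))
	for sc.Scan() {
		k, v, ok := strings.Cut(sc.Text(), ":")
		if !ok {
			continue
		}
		fields[strings.TrimSpace(k)] = strings.TrimSpace(v)
	}
	return fields
}

func main() {
	out := "Version:  0.1.0\nRuntimeName:  cri-o\nRuntimeVersion:  1.24.6\nRuntimeApiVersion:  v1\n"
	f := parseCrictlVersion(out)
	fmt.Printf("%s %s (API %s)\n", f["RuntimeName"], f["RuntimeVersion"], f["RuntimeApiVersion"])
}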
	I0916 10:51:35.530550 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:51:35.579393 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:51:35.621956 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:51:35.623893 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:35.639065 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:51:35.642843 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
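Note: the bash one-liner above is an idempotent upsert: it strips any existing host.minikube.internal record from /etc/hosts, appends a fresh one, and copies the result back via sudo. The same transform as a Go function (upsertHost is a hypothetical name; file I/O and sudo handling are omitted):

package main

import (
	"fmt"
	"strings"
)

// upsertHost removes any existing line for the given host and appends a
// fresh "IP<tab>host" record -- the same effect as the grep/echo pipeline
// in the log.
func upsertHost(hostsFile, ip, host string) string {
	var out []string
	for _, line := range strings.Split(hostsFile, "\n") {
		if strings.HasSuffix(line, "\t"+host) {
			continue // drop the stale record
		}
		out = append(out, line)
	}
	return strings.Join(out, "\n") + fmt.Sprintf("%s\t%s\n", ip, host)
}

func main() {
	hosts := "127.0.0.1\tlocalhost\n10.0.0.9\thost.minikube.internal\n"
	fmt.Print(upsertHost(hosts, "192.168.49.1", "host.minikube.internal"))
}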
	I0916 10:51:35.654560 1415006 kubeadm.go:883] updating cluster {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:51:35.654692 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:35.654752 1415006 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:35.729057 1415006 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:51:35.729080 1415006 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:51:35.729137 1415006 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:35.765757 1415006 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:51:35.765780 1415006 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:51:35.765788 1415006 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:51:35.765907 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
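Note: the kubelet drop-in above uses the standard systemd override idiom: the empty ExecStart= clears the base unit's command list so the second ExecStart= fully replaces it rather than appending. A sketch of rendering such a drop-in with text/template (the template is trimmed to the flags that matter here; the struct fields are illustrative, the values come from the log):

package main

import (
	"os"
	"text/template"
)

// kubeletUnit mirrors the drop-in shown in the log: the blank ExecStart=
// resets the base unit's command so the override fully replaces it.
const kubeletUnit = `[Unit]
Wants={{.Runtime}}.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --hostname-override={{.Node}} --node-ip={{.IP}}

[Install]
`

func main() {
	t := template.Must(template.New("kubelet").Parse(kubeletUnit))
	data := struct{ Runtime, Version, Node, IP string }{"crio", "v1.31.1", "ha-334765", "192.168.49.2"}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}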
	I0916 10:51:35.766013 1415006 ssh_runner.go:195] Run: crio config
	I0916 10:51:35.818760 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:35.818782 1415006 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:35.818797 1415006 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:51:35.818828 1415006 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-334765 NodeName:ha-334765 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:51:35.818987 1415006 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "ha-334765"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
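Note: the generated kubeadm config above is a single YAML stream of four documents -- InitConfiguration, ClusterConfiguration, KubeletConfiguration, and KubeProxyConfiguration -- each selected by its own apiVersion/kind pair. A sketch of splitting such a stream and reporting each kind (regexp-based for brevity; a real consumer would use a YAML decoder):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// A trimmed stand-in for the four-document kubeadm config above.
	cfg := `apiVersion: kubeadm.k8s.io/v1beta3
kind: InitConfiguration
---
apiVersion: kubeadm.k8s.io/v1beta3
kind: ClusterConfiguration
---
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
---
apiVersion: kubeproxy.config.k8s.io/v1alpha1
kind: KubeProxyConfiguration
`
	kind := regexp.MustCompile(`(?m)^kind: (.+)$`)
	// kubeadm treats the file as a YAML stream; each document carries its
	// own apiVersion/kind pair.
	for i, doc := range strings.Split(cfg, "\n---\n") {
		if m := kind.FindStringSubmatch(doc); m != nil {
			fmt.Printf("document %d: %s\n", i+1, m[1])
		}
	}
}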
	
	I0916 10:51:35.819024 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:51:35.819086 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:51:35.832413 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:51:35.832538 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
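Note: the `lsmod | grep ip_vs` probe above is what gates the "auto-enabling control-plane load-balancing" decision: the lb_enable/lb_port settings in the kube-vip manifest are only emitted when the ip_vs kernel module is loaded. A sketch of that check (hasIPVS and the sample lsmod output are illustrative):

package main

import (
	"fmt"
	"strings"
)

// hasIPVS reports whether an ip_vs module shows up in lsmod output,
// which is the condition the log uses to flip lb_enable on.
func hasIPVS(lsmod string) bool {
	for _, line := range strings.Split(lsmod, "\n") {
		if strings.HasPrefix(line, "ip_vs") {
			return true
		}
	}
	return false
}

func main() {
	sample := "Module                  Size  Used by\nip_vs                 184320  6\nnf_conntrack          172032  1 ip_vs\n"
	if hasIPVS(sample) {
		fmt.Println("ip_vs present: enabling kube-vip control-plane load-balancing")
	}
}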
	I0916 10:51:35.832614 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:51:35.842197 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:51:35.842293 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:51:35.851742 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (359 bytes)
	I0916 10:51:35.870725 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:51:35.890021 1415006 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2147 bytes)
	I0916 10:51:35.908444 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0916 10:51:35.927349 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:51:35.930875 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:35.942258 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:36.026713 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:51:36.042941 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.2
	I0916 10:51:36.043025 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:51:36.043060 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.043290 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:51:36.043394 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:51:36.043435 1415006 certs.go:256] generating profile certs ...
	I0916 10:51:36.043538 1415006 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:51:36.043607 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt with IP's: []
	I0916 10:51:36.860300 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt ...
	I0916 10:51:36.860339 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt: {Name:mkac681ee25aa1e7951b0d028bab38cc7560cf3c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.860557 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key ...
	I0916 10:51:36.860571 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key: {Name:mke38ba3a8fcfbd63628a4d07faa22aaaef77bf5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.860669 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb
	I0916 10:51:36.860707 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.254]
	I0916 10:51:37.048960 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb ...
	I0916 10:51:37.048992 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb: {Name:mkaafec7697140c2d12a2897ae395e3bc3762e0a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.049189 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb ...
	I0916 10:51:37.049204 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb: {Name:mk87074ff89a14f89698ca51659210ac44ba3c7c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.049290 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:51:37.049379 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
	I0916 10:51:37.049441 1415006 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:51:37.049460 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt with IP's: []
	I0916 10:51:37.224344 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt ...
	I0916 10:51:37.224377 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt: {Name:mkc239e75fbb7cdc72d962754cc320dca19a354f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.224588 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key ...
	I0916 10:51:37.224606 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key: {Name:mkec03ccf4bbffad63e69f8efaf7009f71a70043 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
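Note: the certs.go/crypto.go lines above generate the per-profile client, apiserver, and aggregator certificates, with the apiserver cert carrying SANs for the service VIP (10.96.0.1), localhost, the node IP, and the HA VIP 192.168.49.254. A compressed sketch with crypto/x509 (self-signed for brevity; minikube actually signs these with the shared minikubeCA):

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"fmt"
	"math/big"
	"net"
	"time"
)

func main() {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: "minikube"},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour), // CertExpiration from the config
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		// SANs mirroring the apiserver cert in the log.
		IPAddresses: []net.IP{
			net.ParseIP("10.96.0.1"), net.ParseIP("127.0.0.1"),
			net.ParseIP("192.168.49.2"), net.ParseIP("192.168.49.254"),
		},
	}
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: der})))
}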
	I0916 10:51:37.224722 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:51:37.224746 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:51:37.224759 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:51:37.224776 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:51:37.224788 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:51:37.224806 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:51:37.224824 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:51:37.224837 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:51:37.224902 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:51:37.224948 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:51:37.224961 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:51:37.224995 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:51:37.225027 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:51:37.225056 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:51:37.225103 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:51:37.225139 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.225156 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.225170 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.225758 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:51:37.251857 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:51:37.277918 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:51:37.304245 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:51:37.330461 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:51:37.356181 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:51:37.381415 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:51:37.406811 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:51:37.432020 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:51:37.459768 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:51:37.485131 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:51:37.512047 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:51:37.530835 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:51:37.536447 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:51:37.546453 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.550332 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.550430 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.557447 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:51:37.567434 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:51:37.577308 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.581116 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.581185 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.588264 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:51:37.597960 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:51:37.607811 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.611428 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.611500 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.618516 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
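Note: the openssl/ln sequence above installs each CA the way OpenSSL expects: a /etc/ssl/certs/<subject-hash>.0 symlink (e.g. b5213941.0 for minikubeCA.pem) lets OpenSSL find the cert by subject hash at verification time. A sketch that shells out to openssl for the hash and creates the link (linkBySubjectHash is a hypothetical helper; needs root to write /etc/ssl/certs):

package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// linkBySubjectHash reproduces the openssl-hash-then-symlink steps from
// the log: OpenSSL locates trusted CAs via /etc/ssl/certs/<hash>.0 links.
func linkBySubjectHash(certPath string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
	if err != nil {
		return err
	}
	hash := strings.TrimSpace(string(out))
	link := fmt.Sprintf("/etc/ssl/certs/%s.0", hash)
	os.Remove(link) // replace any stale link
	return os.Symlink(certPath, link)
}

func main() {
	if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}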
	I0916 10:51:37.628255 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:51:37.631886 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:51:37.631942 1415006 kubeadm.go:392] StartCluster: {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:37.632030 1415006 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:51:37.632090 1415006 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:51:37.679540 1415006 cri.go:89] found id: ""
	I0916 10:51:37.679610 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:51:37.690855 1415006 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:51:37.701130 1415006 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:51:37.701246 1415006 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:51:37.714045 1415006 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:51:37.714077 1415006 kubeadm.go:157] found existing configuration files:
	
	I0916 10:51:37.714133 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:51:37.724303 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:51:37.724373 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:51:37.733985 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:51:37.743731 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:51:37.743822 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:51:37.752897 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:51:37.762961 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:51:37.763033 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:51:37.771597 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:51:37.782188 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:51:37.782276 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:51:37.791510 1415006 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:51:37.840159 1415006 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:51:37.840310 1415006 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:51:37.859474 1415006 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:51:37.859550 1415006 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:51:37.859589 1415006 kubeadm.go:310] OS: Linux
	I0916 10:51:37.859641 1415006 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:51:37.859694 1415006 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:51:37.859744 1415006 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:51:37.859794 1415006 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:51:37.859844 1415006 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:51:37.859895 1415006 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:51:37.859950 1415006 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:51:37.860003 1415006 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:51:37.860051 1415006 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:51:37.925874 1415006 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:51:37.926070 1415006 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:51:37.926217 1415006 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:51:37.937195 1415006 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:51:37.942368 1415006 out.go:235]   - Generating certificates and keys ...
	I0916 10:51:37.942551 1415006 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:51:37.942655 1415006 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:51:38.125701 1415006 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:51:38.357438 1415006 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:51:38.774277 1415006 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:51:39.506214 1415006 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:51:39.748178 1415006 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:51:39.748426 1415006 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-334765 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:40.295627 1415006 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:51:40.295763 1415006 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-334765 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:41.210520 1415006 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:51:41.539038 1415006 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:51:41.729080 1415006 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:51:41.729486 1415006 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:51:42.180776 1415006 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:51:42.580385 1415006 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:51:43.184585 1415006 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:51:43.602984 1415006 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:51:44.189060 1415006 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:51:44.189820 1415006 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:51:44.192872 1415006 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:51:44.195817 1415006 out.go:235]   - Booting up control plane ...
	I0916 10:51:44.195921 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:51:44.195997 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:51:44.196541 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:51:44.207462 1415006 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:51:44.213486 1415006 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:51:44.213756 1415006 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:51:44.308979 1415006 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:51:44.309098 1415006 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:51:45.817452 1415006 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.508735214s
	I0916 10:51:45.817543 1415006 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:51:54.973977 1415006 kubeadm.go:310] [api-check] The API server is healthy after 9.156530405s
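Note: kubeadm's kubelet-check and api-check phases above are plain healthz polling: hit the endpoint until it answers 200 or a 4m0s budget expires (here the kubelet answered after ~1.5s and the API server after ~9.2s). A sketch of that loop (waitHealthy is illustrative; kubeadm's real API-server check also presents client certificates):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// waitHealthy polls a healthz endpoint until it answers 200 OK or the
// deadline passes -- the same shape as kubeadm's kubelet/API checks.
func waitHealthy(url string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		resp, err := http.Get(url)
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				return nil
			}
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("%s not healthy after %s", url, timeout)
}

func main() {
	// 4m0s matches the budget kubeadm reports in the log.
	if err := waitHealthy("http://127.0.0.1:10248/healthz", 4*time.Minute); err != nil {
		fmt.Println(err)
	}
}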
	I0916 10:51:54.996383 1415006 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:51:55.042892 1415006 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:51:55.081931 1415006 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:51:55.082125 1415006 kubeadm.go:310] [mark-control-plane] Marking the node ha-334765 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:51:55.097789 1415006 kubeadm.go:310] [bootstrap-token] Using token: 718qfm.gar9p3a9mv3c0rdq
	I0916 10:51:55.100512 1415006 out.go:235]   - Configuring RBAC rules ...
	I0916 10:51:55.100662 1415006 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:51:55.113526 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:51:55.128064 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:51:55.132619 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:51:55.137079 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:51:55.142883 1415006 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:51:55.385469 1415006 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:51:55.841464 1415006 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:51:56.384537 1415006 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:51:56.386235 1415006 kubeadm.go:310] 
	I0916 10:51:56.386313 1415006 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:51:56.386323 1415006 kubeadm.go:310] 
	I0916 10:51:56.386399 1415006 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:51:56.386409 1415006 kubeadm.go:310] 
	I0916 10:51:56.386434 1415006 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:51:56.386496 1415006 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:51:56.386556 1415006 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:51:56.386567 1415006 kubeadm.go:310] 
	I0916 10:51:56.386620 1415006 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:51:56.386629 1415006 kubeadm.go:310] 
	I0916 10:51:56.386675 1415006 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:51:56.386683 1415006 kubeadm.go:310] 
	I0916 10:51:56.386735 1415006 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:51:56.386812 1415006 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:51:56.386883 1415006 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:51:56.386891 1415006 kubeadm.go:310] 
	I0916 10:51:56.386974 1415006 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:51:56.387052 1415006 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:51:56.387060 1415006 kubeadm.go:310] 
	I0916 10:51:56.387147 1415006 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 718qfm.gar9p3a9mv3c0rdq \
	I0916 10:51:56.387251 1415006 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:51:56.387276 1415006 kubeadm.go:310] 	--control-plane 
	I0916 10:51:56.387284 1415006 kubeadm.go:310] 
	I0916 10:51:56.387367 1415006 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:51:56.387375 1415006 kubeadm.go:310] 
	I0916 10:51:56.387456 1415006 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 718qfm.gar9p3a9mv3c0rdq \
	I0916 10:51:56.387558 1415006 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 10:51:56.392511 1415006 kubeadm.go:310] W0916 10:51:37.836530    1224 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:56.392831 1415006 kubeadm.go:310] W0916 10:51:37.837607    1224 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:56.393042 1415006 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:51:56.393147 1415006 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:51:56.393167 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:56.393178 1415006 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:56.396114 1415006 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:51:56.398793 1415006 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:51:56.402925 1415006 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:51:56.402950 1415006 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:51:56.422637 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:51:56.718869 1415006 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:51:56.719008 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:56.719095 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765 minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=true
	I0916 10:51:56.882861 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:56.882923 1415006 ops.go:34] apiserver oom_adj: -16
	I0916 10:51:57.382970 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:57.883653 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:58.383021 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:58.882914 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:59.112415 1415006 kubeadm.go:1113] duration metric: took 2.393450688s to wait for elevateKubeSystemPrivileges
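Note: the repeated `kubectl get sa default` runs above (one roughly every 500ms) are how minikube waits for the token controller to create the default ServiceAccount before granting kube-system the cluster-admin binding; the 2.39s duration metric is the total wait. A sketch of the retry loop (waitForDefaultSA is a hypothetical wrapper around the same command):

package main

import (
	"fmt"
	"os/exec"
	"time"
)

// waitForDefaultSA retries `kubectl get sa default` until the token
// controller has created the ServiceAccount, mirroring the ~500ms retry
// cadence visible in the timestamps above.
func waitForDefaultSA(kubeconfig string, timeout time.Duration) (time.Duration, error) {
	start := time.Now()
	for time.Since(start) < timeout {
		cmd := exec.Command("kubectl", "get", "sa", "default", "--kubeconfig", kubeconfig)
		if err := cmd.Run(); err == nil {
			return time.Since(start), nil
		}
		time.Sleep(500 * time.Millisecond)
	}
	return 0, fmt.Errorf("default service account not ready after %s", timeout)
}

func main() {
	if d, err := waitForDefaultSA("/var/lib/minikube/kubeconfig", 2*time.Minute); err == nil {
		fmt.Printf("default SA ready after %s\n", d)
	}
}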
	I0916 10:51:59.112452 1415006 kubeadm.go:394] duration metric: took 21.480514885s to StartCluster
	I0916 10:51:59.112470 1415006 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:59.112550 1415006 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:59.113710 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:59.114020 1415006 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:51:59.114054 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:51:59.114066 1415006 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:51:59.114144 1415006 addons.go:69] Setting storage-provisioner=true in profile "ha-334765"
	I0916 10:51:59.114180 1415006 addons.go:234] Setting addon storage-provisioner=true in "ha-334765"
	I0916 10:51:59.114212 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:51:59.115033 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:51:59.115542 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:59.115785 1415006 addons.go:69] Setting default-storageclass=true in profile "ha-334765"
	I0916 10:51:59.115823 1415006 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-334765"
	I0916 10:51:59.116311 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.116930 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.169684 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:59.170166 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:51:59.170889 1415006 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:51:59.171320 1415006 addons.go:234] Setting addon default-storageclass=true in "ha-334765"
	I0916 10:51:59.171398 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:51:59.172115 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.180288 1415006 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:51:59.186707 1415006 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:59.186737 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:51:59.186871 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:59.195156 1415006 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:59.195179 1415006 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:51:59.195247 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:59.219386 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:59.244755 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:59.373862 1415006 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:59.379193 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:51:59.496553 1415006 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:59.727370 1415006 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:51:59.727403 1415006 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:51:59.727509 1415006 round_trippers.go:463] GET https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 10:51:59.727521 1415006 round_trippers.go:469] Request Headers:
	I0916 10:51:59.727530 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:59.727541 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:59.751265 1415006 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 10:51:59.751932 1415006 round_trippers.go:463] PUT https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:51:59.751957 1415006 round_trippers.go:469] Request Headers:
	I0916 10:51:59.751966 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:59.751973 1415006 round_trippers.go:473]     Content-Type: application/json
	I0916 10:51:59.751977 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:59.778964 1415006 round_trippers.go:574] Response Status: 200 OK in 26 milliseconds
	I0916 10:51:59.833987 1415006 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
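Note: the long bash pipeline at 10:51:59.379 rewrites the Corefile in the coredns ConfigMap, inserting a hosts{} stanza ahead of the forward plugin so pods can resolve host.minikube.internal to the gateway 192.168.49.1; the "host record injected" line confirms the replace succeeded. The same edit as a string transform (injectHostRecord is illustrative; the real flow round-trips through kubectl get/replace):

package main

import (
	"fmt"
	"strings"
)

// injectHostRecord inserts a hosts{} stanza ahead of the forward plugin,
// the same edit the sed pipeline in the log applies to the CoreDNS
// Corefile so pods can resolve host.minikube.internal.
func injectHostRecord(corefile, ip string) string {
	stanza := fmt.Sprintf("    hosts {\n       %s host.minikube.internal\n       fallthrough\n    }\n", ip)
	i := strings.Index(corefile, "    forward .")
	if i < 0 {
		return corefile // nothing to anchor on; leave untouched
	}
	return corefile[:i] + stanza + corefile[i:]
}

func main() {
	corefile := ".:53 {\n    errors\n    forward . /etc/resolv.conf\n    cache 30\n}\n"
	fmt.Print(injectHostRecord(corefile, "192.168.49.1"))
}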
	I0916 10:52:00.418014 1415006 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 10:52:00.421055 1415006 addons.go:510] duration metric: took 1.30671994s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 10:52:00.421168 1415006 start.go:246] waiting for cluster config update ...
	I0916 10:52:00.421196 1415006 start.go:255] writing updated cluster config ...
	I0916 10:52:00.425091 1415006 out.go:201] 
	I0916 10:52:00.428130 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:00.428306 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:52:00.432343 1415006 out.go:177] * Starting "ha-334765-m02" control-plane node in "ha-334765" cluster
	I0916 10:52:00.440079 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:52:00.445821 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:52:00.448662 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:52:00.448823 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:52:00.448750 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:52:00.448981 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:52:00.449001 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:52:00.449175 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:52:00.479753 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:52:00.479779 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:52:00.479898 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:52:00.479921 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:52:00.479926 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:52:00.479938 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:52:00.479943 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:52:00.484731 1415006 image.go:273] response: 
	I0916 10:52:00.664934 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:52:00.664975 1415006 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:52:00.665008 1415006 start.go:360] acquireMachinesLock for ha-334765-m02: {Name:mkb176e2cfa3ae927444127935258ba37ca2bc0a Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:52:00.665145 1415006 start.go:364] duration metric: took 112.153µs to acquireMachinesLock for "ha-334765-m02"
	I0916 10:52:00.665182 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:00.665284 1415006 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 10:52:00.670287 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:52:00.670431 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:52:00.670464 1415006 client.go:168] LocalClient.Create starting
	I0916 10:52:00.670549 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:52:00.670604 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:00.670627 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:00.670689 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:52:00.670711 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:00.670722 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:00.671002 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:00.688356 1415006 network_create.go:77] Found existing network {name:ha-334765 subnet:0x4001b0e3c0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:52:00.688402 1415006 kic.go:121] calculated static IP "192.168.49.3" for the "ha-334765-m02" container
	I0916 10:52:00.688484 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:52:00.707198 1415006 cli_runner.go:164] Run: docker volume create ha-334765-m02 --label name.minikube.sigs.k8s.io=ha-334765-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:52:00.728522 1415006 oci.go:103] Successfully created a docker volume ha-334765-m02
	I0916 10:52:00.728619 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m02 --entrypoint /usr/bin/test -v ha-334765-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:52:01.372104 1415006 oci.go:107] Successfully prepared a docker volume ha-334765-m02
	I0916 10:52:01.372154 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:52:01.372177 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:52:01.372250 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:52:05.551632 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.179343083s)
	I0916 10:52:05.551662 1415006 kic.go:203] duration metric: took 4.179483371s to extract preloaded images to volume ...
	W0916 10:52:05.551816 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:52:05.551922 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:52:05.604077 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765-m02 --name ha-334765-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765-m02 --network ha-334765 --ip 192.168.49.3 --volume ha-334765-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:52:05.935589 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Running}}
	I0916 10:52:05.956962 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:05.978082 1415006 cli_runner.go:164] Run: docker exec ha-334765-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:52:06.058726 1415006 oci.go:144] the created container "ha-334765-m02" has a running status.
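	The docker run at 10:52:05.604 publishes container ports 22, 2376, 5000, 8443 and 32443 on loopback-only ephemeral host ports; the SSH port 34623 used throughout the rest of this provisioning comes from that mapping. A quick way to recover it by hand (standard docker CLI, shown with the value this run reports later):

	    $ docker port ha-334765-m02 22/tcp
	    127.0.0.1:34623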
	I0916 10:52:06.058757 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa...
	I0916 10:52:06.312828 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:52:06.312939 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:52:06.355435 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:06.377811 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:52:06.377836 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:52:06.445402 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:06.466570 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:52:06.466665 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:06.494178 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:06.494511 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:06.494529 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:52:06.495167 1415006 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:51890->127.0.0.1:34623: read: connection reset by peer
	I0916 10:52:09.636482 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:52:09.636510 1415006 ubuntu.go:169] provisioning hostname "ha-334765-m02"
	I0916 10:52:09.636580 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:09.656805 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:09.657052 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:09.657070 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m02 && echo "ha-334765-m02" | sudo tee /etc/hostname
	I0916 10:52:09.804617 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:52:09.804866 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:09.823409 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:09.823663 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:09.823686 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:52:09.960849 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
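	The guarded script above either rewrites an existing 127.0.1.1 entry or appends a new one, so on a fresh kicbase container the net effect inside /etc/hosts should be the single line below (inferred from the script, not captured from the node):

	    127.0.1.1 ha-334765-m02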
	I0916 10:52:09.960874 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:52:09.960892 1415006 ubuntu.go:177] setting up certificates
	I0916 10:52:09.960902 1415006 provision.go:84] configureAuth start
	I0916 10:52:09.960969 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:09.978719 1415006 provision.go:143] copyHostCerts
	I0916 10:52:09.978772 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:52:09.978810 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:52:09.978820 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:52:09.978900 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:52:09.978988 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:52:09.979011 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:52:09.979022 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:52:09.979052 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:52:09.979108 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:52:09.979130 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:52:09.979138 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:52:09.979165 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:52:09.979220 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m02 san=[127.0.0.1 192.168.49.3 ha-334765-m02 localhost minikube]
	I0916 10:52:10.489529 1415006 provision.go:177] copyRemoteCerts
	I0916 10:52:10.489607 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:52:10.489649 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.506758 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:10.605930 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:52:10.605997 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:52:10.631509 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:52:10.631612 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:52:10.658840 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:52:10.658912 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:52:10.685131 1415006 provision.go:87] duration metric: took 724.214128ms to configureAuth
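	The server cert minted at 10:52:09.979 embeds the SANs listed in the san=[...] field above. To double-check what actually landed in the PEM, plain openssl suffices (generic tooling, not a minikube command; output trimmed and approximate):

	    $ openssl x509 -noout -text -in /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem | grep -A1 'Subject Alternative Name'
	        X509v3 Subject Alternative Name:
	            DNS:ha-334765-m02, DNS:localhost, DNS:minikube, IP Address:127.0.0.1, IP Address:192.168.49.3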
	I0916 10:52:10.685158 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:52:10.685357 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:10.685466 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.703073 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:10.703366 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:10.703384 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:52:10.958686 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:52:10.958712 1415006 machine.go:96] duration metric: took 4.492118802s to provisionDockerMachine
	I0916 10:52:10.958733 1415006 client.go:171] duration metric: took 10.28824946s to LocalClient.Create
	I0916 10:52:10.958771 1415006 start.go:167] duration metric: took 10.288341355s to libmachine.API.Create "ha-334765"
	I0916 10:52:10.958783 1415006 start.go:293] postStartSetup for "ha-334765-m02" (driver="docker")
	I0916 10:52:10.958794 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:52:10.958860 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:52:10.958926 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.975981 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.075624 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:11.079601 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:11.079640 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:11.079651 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:11.079659 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:11.079677 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:52:11.079746 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:52:11.079827 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:52:11.079840 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:52:11.079941 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:11.090420 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:52:11.118562 1415006 start.go:296] duration metric: took 159.762972ms for postStartSetup
	I0916 10:52:11.118977 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:11.136980 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:52:11.137307 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:11.137363 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.158476 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.253634 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:11.258728 1415006 start.go:128] duration metric: took 10.593426369s to createHost
	I0916 10:52:11.258802 1415006 start.go:83] releasing machines lock for "ha-334765-m02", held for 10.593639804s
	I0916 10:52:11.258914 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:11.279190 1415006 out.go:177] * Found network options:
	I0916 10:52:11.281657 1415006 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:52:11.284231 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:11.284273 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:11.284347 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:52:11.284399 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.284724 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:11.284780 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.307462 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.314487 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.587011 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:11.592179 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:11.617718 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:11.617855 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:11.657673 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
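	The two find/mv passes above park any loopback and bridge/podman CNI definitions under a .mk_disabled suffix so CRI-O will not load them until minikube's chosen CNI writes its own config. Inside the node that should look roughly like this (the two bridge file names are taken from the cni.go:262 line; the loopback file name is illustrative):

	    $ docker exec ha-334765-m02 ls /etc/cni/net.d
	    100-crio-bridge.conf.mk_disabled
	    200-loopback.conf.mk_disabled
	    87-podman-bridge.conflist.mk_disabled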
	I0916 10:52:11.657699 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:52:11.657763 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:11.657834 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:52:11.676285 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:52:11.689619 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:11.689740 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:11.706308 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:11.725075 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:11.820645 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:11.926313 1415006 docker.go:233] disabling docker service ...
	I0916 10:52:11.926394 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:11.949345 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:11.962262 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:12.060126 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:12.163880 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:52:12.177575 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:12.197810 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:52:12.197937 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.208512 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:52:12.208708 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.219913 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.230153 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.240821 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:12.250534 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.260743 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.277289 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.287823 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:12.296715 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:12.306946 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:12.399434 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
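	Piecing the sed edits at 10:52:12.19-12.27 back together, the CRI-O drop-in that this restart picks up should contain roughly the following keys (a reconstruction from the commands; the [crio.*] section headers are assumed, since the seds only touch individual keys):

	    # /etc/crio/crio.conf.d/02-crio.conf (reconstructed)
	    [crio.image]
	    pause_image = "registry.k8s.io/pause:3.10"

	    [crio.runtime]
	    cgroup_manager = "cgroupfs"
	    conmon_cgroup = "pod"
	    default_sysctls = [
	      "net.ipv4.ip_unprivileged_port_start=0",
	    ]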
	I0916 10:52:12.524967 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:52:12.525125 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:52:12.529140 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:52:12.529213 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:52:12.532859 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:12.574886 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:52:12.574997 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:52:12.614823 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:52:12.657597 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:52:12.660197 1415006 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:12.662736 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:12.678392 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:12.682057 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:12.694028 1415006 mustload.go:65] Loading cluster: ha-334765
	I0916 10:52:12.694263 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:12.694518 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:52:12.710565 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:52:12.710858 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.3
	I0916 10:52:12.710873 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:52:12.710970 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:12.711140 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:52:12.711191 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:52:12.711203 1415006 certs.go:256] generating profile certs ...
	I0916 10:52:12.711281 1415006 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:52:12.711314 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632
	I0916 10:52:12.711333 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 10:52:13.104970 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 ...
	I0916 10:52:13.105002 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632: {Name:mk332993607c190de4cef2cfaffaf260af064109 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:13.105202 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632 ...
	I0916 10:52:13.105218 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632: {Name:mk50822762cb3f23eab85fa836ddc46c7035cd54 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:13.105303 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:52:13.105444 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
	I0916 10:52:13.105587 1415006 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:52:13.105605 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:13.105621 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:13.105637 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:13.105654 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:13.105668 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:13.105688 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:13.105698 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:13.105711 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:13.105761 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:52:13.105795 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:13.105808 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:13.105832 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:52:13.105857 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:13.105881 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:52:13.105926 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:52:13.105959 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.105973 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.105986 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.106049 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:52:13.123479 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:52:13.213053 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:13.216592 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:13.228773 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:13.232197 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:52:13.244494 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:13.247967 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:13.260282 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:13.263770 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:52:13.276165 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:13.279651 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:13.291769 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:13.295289 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:52:13.308129 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:13.333370 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:52:13.359716 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:13.384764 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:52:13.409876 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0916 10:52:13.434260 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:52:13.458675 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:13.488573 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:52:13.514770 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:52:13.540012 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:52:13.566866 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:13.594872 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:13.614472 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:52:13.633347 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:13.654026 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:52:13.672630 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:13.691149 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
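	The stat/scp pairs at 10:52:13.21-13.29 read the shared key material (sa.pub/sa.key, front-proxy CA, etcd CA) off the primary over SSH into memory, and the scp memory --> lines above write identical bytes onto m02; that shared trust material is what lets a second control plane join the same cluster. A quick cross-check, if one were debugging this by hand (illustrative):

	    $ for n in ha-334765 ha-334765-m02; do docker exec "$n" sudo sha256sum /var/lib/minikube/certs/etcd/ca.crt; done

	Both lines should print the same digest.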
	I0916 10:52:13.711987 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:13.731237 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:52:13.736872 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:52:13.746441 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.750286 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.750353 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.757939 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:13.767433 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:13.776739 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.780254 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.780320 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.787147 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:13.796665 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:52:13.806165 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.809849 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.809948 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.818466 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
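	Each openssl x509 -hash -noout run above computes the subject hash that OpenSSL uses for CA lookup, and the paired ln -fs publishes the cert under /etc/ssl/certs/<hash>.0. For the minikubeCA cert the pairing visible in this log is:

	    $ openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	    b5213941
	    $ ls -l /etc/ssl/certs/b5213941.0
	    ... /etc/ssl/certs/b5213941.0 -> /etc/ssl/certs/minikubeCA.pem

	(ls output abridged.)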
	I0916 10:52:13.828214 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:13.832140 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:13.832240 1415006 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 crio true true} ...
	I0916 10:52:13.832362 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:13.832410 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:13.832465 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:13.845768 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:13.845888 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
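	In this manifest kube-vip provides the HA entry point: an ARP-advertised VIP 192.168.49.254/32 on eth0 (vip_cidr "32"), leader election via the plndr-cp-lock lease, and, with lb_enable set, load-balancing of API traffic across control planes on 8443. Whichever node currently holds the lease binds the VIP, which can be spot-checked with something like (output approximate):

	    $ docker exec ha-334765 ip -4 addr show eth0 | grep 192.168.49.254
	        inet 192.168.49.254/32 scope global eth0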
	I0916 10:52:13.845981 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:13.854977 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:13.855079 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:13.863878 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:52:13.883201 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:13.902444 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:13.922631 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:13.926287 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:13.937523 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:14.028531 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:14.042898 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:52:14.043261 1415006 start.go:317] joinCluster: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:14.043396 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:14.043473 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:52:14.062096 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:52:14.228472 1415006 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:14.228566 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 8hrwpz.mj79utjeence4u1y --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443"
	I0916 10:52:22.726920 1415006 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 8hrwpz.mj79utjeence4u1y --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443": (8.498319708s)
	I0916 10:52:22.726953 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:23.147534 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765-m02 minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=false
	I0916 10:52:23.305867 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-334765-m02 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:52:23.515255 1415006 start.go:319] duration metric: took 9.471990207s to joinCluster
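	With the join, label and taint-removal above complete, the cluster should now report two control-plane nodes. A by-hand check against the ha-334765 kubeconfig context would look roughly like this (column values illustrative; m02 stays NotReady until the wait below succeeds):

	    $ kubectl --context ha-334765 get nodes
	    NAME            STATUS     ROLES           AGE     VERSION
	    ha-334765       Ready      control-plane   2m      v1.31.1
	    ha-334765-m02   NotReady   control-plane   10s     v1.31.1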
	I0916 10:52:23.515311 1415006 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:23.515715 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:23.518211 1415006 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:23.521068 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:23.709941 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:23.740820 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:52:23.741096 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:52:23.741154 1415006 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:52:23.741371 1415006 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m02" to be "Ready" ...
	I0916 10:52:23.741457 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:23.741463 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:23.741471 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.741475 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.781124 1415006 round_trippers.go:574] Response Status: 200 OK in 39 milliseconds
	I0916 10:52:24.241976 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:24.241996 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:24.242006 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.242010 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.245900 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:24.741943 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:24.741965 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:24.741974 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.741979 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.749099 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:25.241610 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:25.241631 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:25.241640 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.241644 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.249860 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:52:25.742514 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:25.742535 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:25.742544 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.742548 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.753199 1415006 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:52:25.754210 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:26.242467 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:26.242488 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:26.242498 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.242502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.245197 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:26.741733 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:26.741753 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:26.741763 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.741768 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.746424 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:27.241954 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:27.241977 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:27.241987 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.241992 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.244752 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.741709 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:27.741735 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:27.741746 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.741751 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.744984 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.241770 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:28.241795 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:28.241805 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.241811 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.244551 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:28.245309 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:28.741681 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:28.741710 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:28.741720 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.741724 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.744578 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.242147 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:29.242172 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:29.242182 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.242187 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.245114 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.742566 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:29.742593 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:29.742608 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.742615 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.761729 1415006 round_trippers.go:574] Response Status: 200 OK in 19 milliseconds
	I0916 10:52:30.242409 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:30.242439 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:30.242449 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.242454 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.250146 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:30.252106 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:30.741944 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:30.741966 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:30.741975 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.741981 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.745078 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.242008 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:31.242033 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:31.242044 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.242048 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.245015 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.742091 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:31.742113 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:31.742124 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.742129 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.745699 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.241705 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:32.241728 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:32.241736 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.241740 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.244364 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.741603 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:32.741629 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:32.741640 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.741645 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.744539 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.745432 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:33.241887 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:33.241913 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:33.241924 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.241929 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.244745 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:33.742125 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:33.742151 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:33.742161 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.742166 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.745404 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:34.241644 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:34.241667 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:34.241685 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:34.241690 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:34.244563 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:34.742175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:34.742199 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:34.742209 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:34.742216 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:34.745722 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:34.746318 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:35.242600 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:35.242623 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:35.242642 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:35.242646 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:35.245475 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:35.741735 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:35.741761 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:35.741770 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:35.741774 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:35.744768 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:36.242263 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:36.242286 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:36.242296 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:36.242301 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:36.245814 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:36.741563 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:36.741588 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:36.741598 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:36.741602 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:36.744548 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:37.242185 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:37.242212 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:37.242222 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:37.242227 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:37.245048 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:37.245765 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:37.742194 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:37.742226 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:37.742237 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:37.742242 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:37.745736 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:38.242165 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:38.242192 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:38.242201 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:38.242206 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:38.245009 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:38.742540 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:38.742572 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:38.742581 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:38.742586 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:38.745597 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:39.242203 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:39.242228 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:39.242238 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:39.242243 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:39.245253 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:39.246012 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:39.741671 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:39.741695 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:39.741705 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:39.741709 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:39.744942 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:40.242598 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:40.242624 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:40.242635 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:40.242639 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:40.250216 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:40.741940 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:40.741961 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:40.741972 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:40.741976 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:40.756155 1415006 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:52:41.241625 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:41.241647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:41.241657 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:41.241662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:41.245894 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:41.247063 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:41.742510 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:41.742545 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:41.742556 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:41.742560 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:41.745490 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:42.242360 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:42.242390 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:42.242400 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:42.242406 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:42.246532 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:42.742541 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:42.742563 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:42.742572 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:42.742576 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:42.745391 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:43.241682 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:43.241707 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:43.241739 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:43.241745 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:43.244745 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:43.741996 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:43.742022 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:43.742032 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:43.742039 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:43.745752 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:43.746562 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:44.242297 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:44.242323 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:44.242334 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:44.242338 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:44.245186 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:44.742276 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:44.742305 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:44.742319 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:44.742327 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:44.747590 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:45.241653 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:45.241683 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:45.241691 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:45.241696 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:45.244961 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:45.742638 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:45.742709 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:45.742733 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:45.742753 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:45.745915 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:46.242019 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:46.242051 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:46.242061 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:46.242068 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:46.245110 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:46.246183 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:46.742450 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:46.742475 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:46.742484 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:46.742487 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:46.745682 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:47.241933 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:47.241954 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:47.241965 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:47.241969 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:47.245806 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:47.741951 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:47.741979 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:47.741987 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:47.741992 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:47.745513 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:48.241606 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:48.241627 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:48.241637 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:48.241641 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:48.244434 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:48.742220 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:48.742246 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:48.742256 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:48.742263 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:48.745643 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:48.746444 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:49.242145 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:49.242167 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:49.242188 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:49.242193 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:49.245054 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:49.742183 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:49.742208 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:49.742218 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:49.742225 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:49.745773 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:50.242281 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:50.242307 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:50.242316 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:50.242323 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:50.245234 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:50.742303 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:50.742328 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:50.742339 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:50.742343 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:50.745257 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:51.242595 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:51.242620 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:51.242630 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:51.242634 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:51.245376 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:51.246283 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:51.742591 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:51.742616 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:51.742625 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:51.742630 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:51.745485 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:52.241886 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:52.241907 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:52.241916 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:52.241920 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:52.247220 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:52.742341 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:52.742363 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:52.742372 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:52.742377 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:52.745917 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:53.241555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:53.241580 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:53.241591 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:53.241597 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:53.244293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:53.741926 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:53.741947 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:53.741957 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:53.741962 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:53.744959 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:53.745473 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:54.242061 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:54.242086 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:54.242097 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:54.242103 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:54.244841 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:54.742033 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:54.742063 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:54.742073 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:54.742077 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:54.745079 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:55.242356 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:55.242378 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:55.242388 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:55.242393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:55.245278 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:55.741833 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:55.741860 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:55.741870 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:55.741876 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:55.744609 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:56.241780 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:56.241808 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:56.241818 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:56.241823 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:56.244615 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:56.245318 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:56.741743 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:56.741766 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:56.741776 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:56.741780 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:56.744633 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:57.241613 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:57.241636 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:57.241646 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:57.241652 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:57.244216 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:57.742036 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:57.742062 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:57.742072 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:57.742077 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:57.746530 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:58.241978 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:58.242006 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:58.242021 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:58.242027 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:58.244775 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:58.741646 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:58.741667 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:58.741677 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:58.741682 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:58.745084 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:58.745902 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:59.242241 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:59.242268 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:59.242278 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:59.242282 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:59.245123 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:59.741914 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:59.741938 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:59.741947 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:59.741952 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:59.744805 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:00.241767 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:00.241795 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:00.241808 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.241818 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.246348 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.741743 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:00.741765 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:00.741775 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.741779 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.749230 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:53:00.750039 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:53:01.241929 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:01.241955 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:01.241966 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.241971 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.244762 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:01.741929 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:01.741956 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:01.741965 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.741968 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.745371 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.242363 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:02.242389 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.242400 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.242404 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.245485 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.246489 1415006 node_ready.go:49] node "ha-334765-m02" has status "Ready":"True"
	I0916 10:53:02.246513 1415006 node_ready.go:38] duration metric: took 38.505119639s for node "ha-334765-m02" to be "Ready" ...
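The 38.5s of repeated GETs above is minikube's node_ready wait: it re-fetches the Node object roughly every 500ms until its NodeReady condition reports True, within the 6m0s budget stated in the log. A minimal client-go sketch of the same loop, assuming a kubeconfig path and reusing the node name from the log (the helper names here are hypothetical, not minikube's code):

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // nodeReady reports whether the Node's NodeReady condition is True.
    func nodeReady(n *corev1.Node) bool {
    	for _, c := range n.Status.Conditions {
    		if c.Type == corev1.NodeReady {
    			return c.Status == corev1.ConditionTrue
    		}
    	}
    	return false
    }

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	deadline := time.Now().Add(6 * time.Minute) // matches the 6m0s budget in the log
    	for time.Now().Before(deadline) {
    		n, err := cs.CoreV1().Nodes().Get(context.TODO(), "ha-334765-m02", metav1.GetOptions{})
    		if err == nil && nodeReady(n) {
    			fmt.Println("node Ready")
    			return
    		}
    		time.Sleep(500 * time.Millisecond) // the log shows ~500ms between GETs
    	}
    	fmt.Println("timed out waiting for node")
    }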
	I0916 10:53:02.246524 1415006 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:02.246667 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:02.246679 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.246688 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.246693 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.251351 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:02.268461 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.268572 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:53:02.268585 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.268595 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.268602 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.272439 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.273315 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.273339 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.273349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.273354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.275959 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.276538 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.276558 1415006 pod_ready.go:82] duration metric: took 8.057814ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.276569 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.276638 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:53:02.276649 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.276657 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.276661 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.279530 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.280211 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.280229 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.280238 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.280242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.283048 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.283574 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.283596 1415006 pod_ready.go:82] duration metric: took 7.016289ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.283607 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.283675 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:53:02.283687 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.283695 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.283701 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.286265 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.286865 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.286896 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.286906 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.286911 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.289188 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.290118 1415006 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.290142 1415006 pod_ready.go:82] duration metric: took 6.523247ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.290153 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.290253 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:53:02.290262 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.290270 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.290274 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.292803 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.293555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:02.293576 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.293585 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.293588 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.296047 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.296705 1415006 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.296754 1415006 pod_ready.go:82] duration metric: took 6.57187ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.296779 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.443144 1415006 request.go:632] Waited for 146.287429ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:53:02.443230 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:53:02.443240 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.443249 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.443262 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.446555 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.642805 1415006 request.go:632] Waited for 195.379106ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.642916 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.642930 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.642952 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.642970 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.646665 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.647425 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.647451 1415006 pod_ready.go:82] duration metric: took 350.660949ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
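The request.go:632 "Waited ... due to client-side throttling, not priority and fairness" lines come from client-go's client-side token-bucket rate limiter. The rest.Config dump earlier in this log shows QPS:0, Burst:0, so the client falls back to its defaults (5 requests/second, burst 10), and the burst of pod/node GETs during the pod_ready sweep queues behind it. A hedged sketch of raising those limits when constructing the client (values illustrative, path a placeholder):

    package main

    import (
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
    	if err != nil {
    		panic(err)
    	}
    	cfg.QPS = 50    // client-go defaults to 5 req/s when QPS is left at 0
    	cfg.Burst = 100 // default burst is 10
    	if _, err := kubernetes.NewForConfig(cfg); err != nil {
    		panic(err)
    	}
    }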
	I0916 10:53:02.647465 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.842366 1415006 request.go:632] Waited for 194.827087ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:53:02.842458 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:53:02.842468 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.842477 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.842482 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.845485 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.042509 1415006 request.go:632] Waited for 196.214326ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.042622 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.042634 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.042644 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.042652 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.045759 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.046402 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.046486 1415006 pod_ready.go:82] duration metric: took 399.011706ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.046515 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.242890 1415006 request.go:632] Waited for 196.300429ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:53:03.242965 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:53:03.242973 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.242981 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.242985 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.246378 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.442460 1415006 request.go:632] Waited for 195.246531ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:03.442537 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:03.442548 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.442559 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.442574 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.445445 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.446203 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.446231 1415006 pod_ready.go:82] duration metric: took 399.70594ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.446244 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.642600 1415006 request.go:632] Waited for 196.265943ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:53:03.642664 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:53:03.642670 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.642679 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.642687 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.645564 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.843031 1415006 request.go:632] Waited for 196.277414ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.843103 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.843109 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.843118 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.843122 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.846031 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.846685 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.846705 1415006 pod_ready.go:82] duration metric: took 400.453524ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.846717 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.043206 1415006 request.go:632] Waited for 196.413574ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:53:04.043312 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:53:04.043345 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.043361 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.043366 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.046273 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.242448 1415006 request.go:632] Waited for 195.253449ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:04.242555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:04.242568 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.242589 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.242602 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.245443 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.246596 1415006 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:04.246635 1415006 pod_ready.go:82] duration metric: took 399.906535ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.246683 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.443057 1415006 request.go:632] Waited for 196.287465ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:53:04.443134 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:53:04.443146 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.443153 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.443157 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.446204 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.643359 1415006 request.go:632] Waited for 196.339574ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:04.643433 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:04.643443 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.643452 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.643464 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.646461 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.647237 1415006 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:04.647261 1415006 pod_ready.go:82] duration metric: took 400.564553ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.647274 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.843076 1415006 request.go:632] Waited for 195.706318ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:53:04.843175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:53:04.843185 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.843195 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.843212 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.846214 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.043290 1415006 request.go:632] Waited for 196.31747ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:05.043357 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:05.043364 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.043373 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.043383 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.046606 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.047589 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:05.047661 1415006 pod_ready.go:82] duration metric: took 400.344866ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.047689 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.242459 1415006 request.go:632] Waited for 194.67199ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:53:05.242544 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:53:05.242555 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.242571 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.242577 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.245337 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.442971 1415006 request.go:632] Waited for 196.967989ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:05.443047 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:05.443053 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.443063 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.443067 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.445902 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.446757 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:05.446777 1415006 pod_ready.go:82] duration metric: took 399.067697ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.446790 1415006 pod_ready.go:39] duration metric: took 3.200220972s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
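
For anyone reproducing the readiness poll above outside minikube, a minimal client-go sketch follows. The kubeconfig path, namespace, pod name, poll interval, and the 6m deadline mirroring the log are illustrative assumptions; minikube's actual pod_ready.go logic differs in detail:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// isPodReady reports whether the Pod has a Ready condition with status True,
// which is the condition the waits above are checking.
func isPodReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Poll until the pod is Ready or the deadline passes.
	deadline := time.Now().Add(6 * time.Minute)
	for time.Now().Before(deadline) {
		pod, err := client.CoreV1().Pods("kube-system").Get(context.TODO(), "kube-scheduler-ha-334765", metav1.GetOptions{})
		if err == nil && isPodReady(pod) {
			fmt.Println("pod is Ready")
			return
		}
		time.Sleep(2 * time.Second)
	}
	fmt.Println("timed out waiting for pod")
}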
	I0916 10:53:05.446806 1415006 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:53:05.446880 1415006 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:53:05.458901 1415006 api_server.go:72] duration metric: took 41.943544799s to wait for apiserver process to appear ...
	I0916 10:53:05.458967 1415006 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:53:05.459005 1415006 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:53:05.467138 1415006 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:53:05.467221 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:53:05.467232 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.467242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.467248 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.468101 1415006 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:53:05.468214 1415006 api_server.go:141] control plane version: v1.31.1
	I0916 10:53:05.468228 1415006 api_server.go:131] duration metric: took 9.240118ms to wait for apiserver health ...
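
The healthz and version probes above can be approximated with a plain HTTPS client. A sketch, assuming anonymous access to /healthz and /version is allowed (the default system:public-info-viewer binding) and skipping TLS verification for brevity, which minikube itself does not do:

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// InsecureSkipVerify is an assumption for this sketch; the real check
	// trusts the cluster CA instead.
	c := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}}
	for _, path := range []string{"/healthz", "/version"} {
		resp, err := c.Get("https://192.168.49.2:8443" + path)
		if err != nil {
			fmt.Println(path, "error:", err)
			continue
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Printf("%s -> %s: %s\n", path, resp.Status, body)
	}
}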
	I0916 10:53:05.468236 1415006 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:53:05.642462 1415006 request.go:632] Waited for 174.157713ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:05.642633 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:05.642647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.642655 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.642660 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.647406 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:05.653624 1415006 system_pods.go:59] 17 kube-system pods found
	I0916 10:53:05.653662 1415006 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:53:05.653668 1415006 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:53:05.653673 1415006 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:53:05.653677 1415006 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:53:05.653682 1415006 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:53:05.653686 1415006 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:53:05.653690 1415006 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:53:05.653694 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:53:05.653698 1415006 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:53:05.653703 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:53:05.653706 1415006 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:53:05.653711 1415006 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:53:05.653722 1415006 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:53:05.653725 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:53:05.653730 1415006 system_pods.go:61] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:53:05.653735 1415006 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:53:05.653739 1415006 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:53:05.653751 1415006 system_pods.go:74] duration metric: took 185.50876ms to wait for pod list to return data ...
	I0916 10:53:05.653760 1415006 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:53:05.843013 1415006 request.go:632] Waited for 189.171469ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:05.843072 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:05.843111 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.843124 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.843128 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.846194 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.846512 1415006 default_sa.go:45] found service account: "default"
	I0916 10:53:05.846554 1415006 default_sa.go:55] duration metric: took 192.782373ms for default service account to be created ...
	I0916 10:53:05.846579 1415006 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:53:06.042639 1415006 request.go:632] Waited for 195.976491ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:06.042754 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:06.042771 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:06.042810 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.042816 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.047399 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:06.053539 1415006 system_pods.go:86] 17 kube-system pods found
	I0916 10:53:06.053625 1415006 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:53:06.053663 1415006 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:53:06.053691 1415006 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:53:06.053716 1415006 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:53:06.053750 1415006 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:53:06.053773 1415006 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:53:06.053795 1415006 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:53:06.053831 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:53:06.053857 1415006 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:53:06.053877 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:53:06.053914 1415006 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:53:06.053941 1415006 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:53:06.053961 1415006 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:53:06.053998 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:53:06.054022 1415006 system_pods.go:89] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:53:06.054041 1415006 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:53:06.054075 1415006 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:53:06.054100 1415006 system_pods.go:126] duration metric: took 207.502425ms to wait for k8s-apps to be running ...
	I0916 10:53:06.054122 1415006 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:53:06.054223 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:53:06.069303 1415006 system_svc.go:56] duration metric: took 15.171233ms WaitForService to wait for kubelet
	I0916 10:53:06.069331 1415006 kubeadm.go:582] duration metric: took 42.55399515s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:53:06.069351 1415006 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:53:06.242778 1415006 request.go:632] Waited for 173.332043ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:06.242857 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:06.242867 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:06.242876 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.242882 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.246168 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:06.247202 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:06.247235 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:06.247246 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:06.247251 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:06.247256 1415006 node_conditions.go:105] duration metric: took 177.90001ms to run NodePressure ...
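
The NodePressure verification reads each node's capacity from the API; the two figures in the log (203034800Ki ephemeral storage, 2 CPUs) come straight from node status. A short client-go sketch printing the same values, assuming a kubeconfig at the default path:

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	nodes, err := client.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	// Print the same two capacity figures the log reports per node.
	for _, n := range nodes.Items {
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		eph := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name, cpu.String(), eph.String())
	}
}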
	I0916 10:53:06.247268 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:53:06.247300 1415006 start.go:255] writing updated cluster config ...
	I0916 10:53:06.249092 1415006 out.go:201] 
	I0916 10:53:06.250653 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:06.250796 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:53:06.252540 1415006 out.go:177] * Starting "ha-334765-m03" control-plane node in "ha-334765" cluster
	I0916 10:53:06.254018 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:53:06.255307 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:53:06.256474 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:53:06.256499 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:53:06.256560 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:53:06.256644 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:53:06.256656 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:53:06.256871 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:53:06.275827 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:53:06.275854 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:53:06.275935 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:53:06.275957 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:53:06.275965 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:53:06.275973 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:53:06.275981 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:53:06.278155 1415006 image.go:273] response: 
	I0916 10:53:06.474580 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:53:06.474619 1415006 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:53:06.474653 1415006 start.go:360] acquireMachinesLock for ha-334765-m03: {Name:mkfee903f3f5d2ff3d5e015b57c571ebdaa535f2 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:53:06.474774 1415006 start.go:364] duration metric: took 99.583µs to acquireMachinesLock for "ha-334765-m03"
	I0916 10:53:06.474806 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:06.474936 1415006 start.go:125] createHost starting for "m03" (driver="docker")
	I0916 10:53:06.476575 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:53:06.476719 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:53:06.476783 1415006 client.go:168] LocalClient.Create starting
	I0916 10:53:06.476872 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:53:06.476909 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:53:06.476931 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:53:06.476987 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:53:06.477009 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:53:06.477024 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:53:06.477270 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:53:06.493092 1415006 network_create.go:77] Found existing network {name:ha-334765 subnet:0x400180ced0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:53:06.493133 1415006 kic.go:121] calculated static IP "192.168.49.4" for the "ha-334765-m03" container
	I0916 10:53:06.493213 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:53:06.507809 1415006 cli_runner.go:164] Run: docker volume create ha-334765-m03 --label name.minikube.sigs.k8s.io=ha-334765-m03 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:53:06.524398 1415006 oci.go:103] Successfully created a docker volume ha-334765-m03
	I0916 10:53:06.524488 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-m03-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m03 --entrypoint /usr/bin/test -v ha-334765-m03:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:53:07.123195 1415006 oci.go:107] Successfully prepared a docker volume ha-334765-m03
	I0916 10:53:07.123241 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:53:07.123262 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:53:07.123339 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:53:11.291684 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.168299443s)
	I0916 10:53:11.291718 1415006 kic.go:203] duration metric: took 4.168452038s to extract preloaded images to volume ...
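
The extraction step above runs a throwaway container whose entrypoint is tar, mounting the preload tarball read-only and the node's named volume as the target. A Go sketch of the same invocation via os/exec; the tarball path is illustrative, and the image tag is shown without the sha256 digest the log pins:

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Mirror the "docker run --rm --entrypoint /usr/bin/tar ..." command
	// from the log: extract the lz4-compressed preload into the volume
	// that will become the node's /var.
	cmd := exec.Command("docker", "run", "--rm",
		"--entrypoint", "/usr/bin/tar",
		"-v", "/path/to/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro",
		"-v", "ha-334765-m03:/extractDir",
		"gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644",
		"-I", "lz4", "-xf", "/preloaded.tar", "-C", "/extractDir")
	out, err := cmd.CombinedOutput()
	fmt.Printf("%s", out)
	if err != nil {
		fmt.Println("extract failed:", err)
	}
}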
	W0916 10:53:11.291863 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:53:11.291988 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:53:11.362214 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765-m03 --name ha-334765-m03 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m03 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765-m03 --network ha-334765 --ip 192.168.49.4 --volume ha-334765-m03:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:53:11.713441 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Running}}
	I0916 10:53:11.735379 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:11.764201 1415006 cli_runner.go:164] Run: docker exec ha-334765-m03 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:53:11.835446 1415006 oci.go:144] the created container "ha-334765-m03" has a running status.
	I0916 10:53:11.835475 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa...
	I0916 10:53:12.082378 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:53:12.082457 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:53:12.107481 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:12.143696 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:53:12.143723 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765-m03 chown docker:docker /home/docker/.ssh/authorized_keys]
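
The "Creating ssh key for kic" step boils down to generating an RSA keypair, saving the private key, and installing the public half as the docker user's authorized_keys inside the container. A minimal sketch with illustrative output paths, not minikube's exact implementation:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"os"

	"golang.org/x/crypto/ssh"
)

func main() {
	// Generate the keypair and write the PEM private key with 0600 perms,
	// matching what an SSH client expects.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	privPEM := pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(key),
	})
	if err := os.WriteFile("id_rsa", privPEM, 0o600); err != nil {
		panic(err)
	}
	// Emit the single authorized_keys line that gets copied to
	// /home/docker/.ssh/authorized_keys in the node container.
	pub, err := ssh.NewPublicKey(&key.PublicKey)
	if err != nil {
		panic(err)
	}
	if err := os.WriteFile("id_rsa.pub", ssh.MarshalAuthorizedKey(pub), 0o644); err != nil {
		panic(err)
	}
}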
	I0916 10:53:12.261270 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:12.291299 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:53:12.291406 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:12.331691 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:12.331980 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:12.331996 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:53:12.332609 1415006 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:34576->127.0.0.1:34628: read: connection reset by peer
	I0916 10:53:15.472981 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
	
	I0916 10:53:15.473007 1415006 ubuntu.go:169] provisioning hostname "ha-334765-m03"
	I0916 10:53:15.473076 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:15.494704 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:15.495028 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:15.495067 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m03 && echo "ha-334765-m03" | sudo tee /etc/hostname
	I0916 10:53:15.650225 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
	
	I0916 10:53:15.650316 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:15.672331 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:15.672616 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:15.672637 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:53:15.814223 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:53:15.814255 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:53:15.814272 1415006 ubuntu.go:177] setting up certificates
	I0916 10:53:15.814281 1415006 provision.go:84] configureAuth start
	I0916 10:53:15.814347 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:15.833647 1415006 provision.go:143] copyHostCerts
	I0916 10:53:15.833696 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:53:15.833730 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:53:15.833741 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:53:15.833820 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:53:15.833906 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:53:15.833927 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:53:15.833932 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:53:15.833963 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:53:15.834012 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:53:15.834032 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:53:15.834039 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:53:15.834064 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:53:15.834129 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m03 san=[127.0.0.1 192.168.49.4 ha-334765-m03 localhost minikube]
	I0916 10:53:16.104183 1415006 provision.go:177] copyRemoteCerts
	I0916 10:53:16.104255 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:53:16.104300 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.123646 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.222535 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:53:16.222611 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:53:16.251274 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:53:16.251343 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:53:16.278542 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:53:16.278611 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:53:16.306453 1415006 provision.go:87] duration metric: took 492.157045ms to configureAuth
	I0916 10:53:16.306481 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:53:16.306725 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:16.306838 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.324544 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:16.324981 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:16.325005 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:53:16.627483 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:53:16.627505 1415006 machine.go:96] duration metric: took 4.336184696s to provisionDockerMachine
	I0916 10:53:16.627515 1415006 client.go:171] duration metric: took 10.150719198s to LocalClient.Create
	I0916 10:53:16.627528 1415006 start.go:167] duration metric: took 10.150810715s to libmachine.API.Create "ha-334765"
	I0916 10:53:16.627535 1415006 start.go:293] postStartSetup for "ha-334765-m03" (driver="docker")
	I0916 10:53:16.627546 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:53:16.627615 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:53:16.627656 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.654164 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.758791 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:53:16.762707 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:53:16.762743 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:53:16.762754 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:53:16.762761 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:53:16.762772 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:53:16.762840 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:53:16.762923 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:53:16.762936 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:53:16.763037 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:53:16.772014 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:53:16.801813 1415006 start.go:296] duration metric: took 174.260965ms for postStartSetup
	I0916 10:53:16.802211 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:16.821029 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:53:16.821355 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:53:16.821396 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.839877 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.934101 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:53:16.939654 1415006 start.go:128] duration metric: took 10.464702951s to createHost
	I0916 10:53:16.939677 1415006 start.go:83] releasing machines lock for "ha-334765-m03", held for 10.464890261s
	I0916 10:53:16.939761 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:16.965319 1415006 out.go:177] * Found network options:
	I0916 10:53:16.968311 1415006 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:53:16.970944 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.970983 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.971012 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.971033 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:53:16.971117 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:53:16.971163 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.971452 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:53:16.971510 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.989731 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.994076 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:17.260397 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:53:17.265904 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:53:17.288372 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:53:17.288452 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:53:17.373761 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:53:17.373782 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:53:17.373815 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:53:17.373866 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:53:17.400994 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:53:17.415230 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:53:17.415320 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:53:17.432013 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:53:17.449884 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:53:17.567007 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:53:17.678362 1415006 docker.go:233] disabling docker service ...
	I0916 10:53:17.678486 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:53:17.714391 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:53:17.728114 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:53:17.828765 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:53:17.931898 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:53:17.944776 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:53:17.962967 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:53:17.963074 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.974753 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:53:17.974862 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.987324 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.999085 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.015477 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:53:18.026175 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.039368 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.063804 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.074316 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:53:18.084063 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:53:18.093683 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:18.187365 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
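
The sed pipeline above rewrites /etc/crio/crio.conf.d/02-crio.conf in place before restarting cri-o. A rough Go equivalent of the first two edits (pinning the pause image and switching to the cgroupfs cgroup manager), to be run as root on the node; this is a sketch of the pattern, not minikube's implementation:

package main

import (
	"fmt"
	"os"
	"regexp"
)

func main() {
	path := "/etc/crio/crio.conf.d/02-crio.conf"
	data, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}
	// Same substitutions the logged sed commands perform.
	data = regexp.MustCompile(`(?m)^.*pause_image = .*$`).
		ReplaceAll(data, []byte(`pause_image = "registry.k8s.io/pause:3.10"`))
	data = regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`).
		ReplaceAll(data, []byte(`cgroup_manager = "cgroupfs"`))
	if err := os.WriteFile(path, data, 0o644); err != nil {
		panic(err)
	}
	fmt.Println("updated", path, "- restart crio to apply")
}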
	I0916 10:53:18.309318 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:53:18.309407 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:53:18.314492 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:53:18.314584 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:53:18.318911 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:53:18.363698 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:53:18.363788 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:53:18.404796 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:53:18.451943 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:53:18.455026 1415006 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:53:18.457816 1415006 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:53:18.460419 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:53:18.476580 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:53:18.480547 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:53:18.491447 1415006 mustload.go:65] Loading cluster: ha-334765
	I0916 10:53:18.491704 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:18.491965 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:53:18.508964 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:53:18.509259 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.4
	I0916 10:53:18.509275 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:53:18.509289 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:18.509407 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:53:18.509455 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:53:18.509475 1415006 certs.go:256] generating profile certs ...
	I0916 10:53:18.509554 1415006 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:53:18.509582 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7
	I0916 10:53:18.509602 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:53:19.047943 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 ...
	I0916 10:53:19.047976 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7: {Name:mk3f67d3b8ad284ed3fcbe0ef0f2362b0ca1a10f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:19.048216 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7 ...
	I0916 10:53:19.048232 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7: {Name:mka9b6e6481e41e152d6f053685783f8774b731a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:19.048319 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:53:19.048466 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
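
The apiserver serving cert generated above carries IP SANs for the in-cluster service VIP (10.96.0.1), loopback, every control-plane node, and the HA VIP 192.168.49.254. A self-signed crypto/x509 sketch with the same SAN list; minikube actually signs with its minikubeCA key rather than self-signing:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	// Template with the IP and DNS SANs taken from the log lines above.
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: "minikube"},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(3 * 365 * 24 * time.Hour),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		DNSNames:     []string{"ha-334765-m03", "localhost", "minikube"},
		IPAddresses: []net.IP{
			net.ParseIP("10.96.0.1"), net.ParseIP("127.0.0.1"),
			net.ParseIP("192.168.49.2"), net.ParseIP("192.168.49.3"),
			net.ParseIP("192.168.49.4"), net.ParseIP("192.168.49.254"),
		},
	}
	// Self-sign for the sketch: template doubles as the parent.
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}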
	I0916 10:53:19.048628 1415006 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:53:19.048647 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:53:19.048664 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:53:19.048702 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:53:19.048719 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:53:19.048734 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:53:19.048746 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:53:19.048760 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:53:19.048776 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:53:19.048832 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:53:19.048865 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:53:19.048877 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:53:19.048941 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:53:19.048973 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:53:19.048998 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:53:19.049041 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:53:19.049074 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.049095 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.049110 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.049167 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:53:19.067596 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:53:19.165164 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:53:19.170272 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:53:19.184795 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:53:19.188445 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:53:19.201392 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:53:19.205315 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:53:19.218566 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:53:19.222202 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:53:19.234664 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:53:19.238432 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:53:19.250860 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:53:19.254569 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:53:19.267856 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:53:19.293781 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:53:19.325208 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:53:19.355506 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:53:19.382558 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0916 10:53:19.411948 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:53:19.439207 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:53:19.472121 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:53:19.503271 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:53:19.528254 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:53:19.556903 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:53:19.589671 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:53:19.610404 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:53:19.631169 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:53:19.650809 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:53:19.670985 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:53:19.691580 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 10:53:19.712009 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
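Editor's note: the scp lines above mirror every CA and profile certificate onto the new control-plane node before kubeadm runs there. A minimal sketch of one such copy step, assuming `scp` is on PATH and using the port and key path shown in the log (minikube's ssh_runner.go drives a persistent SSH session rather than shelling out, so this is an illustration of the pattern, not its code):

    package main

    import (
    	"fmt"
    	"log"
    	"os/exec"
    )

    // copyCert pushes a local certificate to the node's cert directory,
    // the way the scp lines in the log above do for each asset.
    func copyCert(local, remote string) error {
    	cmd := exec.Command("scp",
    		"-P", "34618", // host port mapped to the node's sshd (from the log)
    		"-i", "/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa",
    		local,
    		fmt.Sprintf("docker@127.0.0.1:%s", remote),
    	)
    	out, err := cmd.CombinedOutput()
    	if err != nil {
    		return fmt.Errorf("scp %s: %v: %s", local, err, out)
    	}
    	return nil
    }

    func main() {
    	if err := copyCert(
    		"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt",
    		"/var/lib/minikube/certs/ca.crt",
    	); err != nil {
    		log.Fatal(err)
    	}
    }

The `scp memory --> ...` lines are the same operation with an in-memory source: material such as sa.key is read back from one node and written to the next without touching the local disk.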
	I0916 10:53:19.733794 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:53:19.741003 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:53:19.752608 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.756420 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.756518 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.763995 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:53:19.773757 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:53:19.783253 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.786986 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.787055 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.794466 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:53:19.804322 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:53:19.814048 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.818016 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.818130 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.826289 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
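Editor's note: each `ln -fs ... /etc/ssl/certs/<hash>.0` above follows OpenSSL's trust-store lookup convention: certificates are found by the hash of their subject name, so every PEM needs a `<subject-hash>.0` symlink beside it. A rough equivalent of the hash-and-link step, assuming `openssl` is installed (hypothetical helper, not minikube's code):

    package main

    import (
    	"fmt"
    	"log"
    	"os"
    	"os/exec"
    	"path/filepath"
    	"strings"
    )

    // linkBySubjectHash creates the /etc/ssl/certs/<hash>.0 symlink that
    // OpenSSL uses to look a certificate up by its subject name.
    func linkBySubjectHash(pem, certDir string) error {
    	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
    	if err != nil {
    		return fmt.Errorf("hashing %s: %w", pem, err)
    	}
    	hash := strings.TrimSpace(string(out)) // e.g. "b5213941" for minikubeCA.pem above
    	link := filepath.Join(certDir, hash+".0")
    	os.Remove(link) // mimic ln -fs: replace any stale link
    	return os.Symlink(pem, link)
    }

    func main() {
    	if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem", "/etc/ssl/certs"); err != nil {
    		log.Fatal(err)
    	}
    }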
	I0916 10:53:19.836101 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:53:19.840415 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:53:19.840472 1415006 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 crio true true} ...
	I0916 10:53:19.840565 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
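Editor's note: the kubelet drop-in above is rendered from per-node values (hostname override, node IP, runtime service). A minimal text/template sketch of that rendering, with an abbreviated template assembled from the flags visible in the log (illustrative, not minikube's exact template):

    package main

    import (
    	"log"
    	"os"
    	"text/template"
    )

    const kubeletUnit = `[Unit]
    Wants=crio.service

    [Service]
    ExecStart=
    ExecStart=/var/lib/minikube/binaries/{{.KubernetesVersion}}/kubelet --hostname-override={{.NodeName}} --node-ip={{.NodeIP}} --kubeconfig=/etc/kubernetes/kubelet.conf

    [Install]
    `

    func main() {
    	t := template.Must(template.New("kubelet").Parse(kubeletUnit))
    	// Values taken from the log lines above for node m03.
    	err := t.Execute(os.Stdout, map[string]string{
    		"KubernetesVersion": "v1.31.1",
    		"NodeName":          "ha-334765-m03",
    		"NodeIP":            "192.168.49.4",
    	})
    	if err != nil {
    		log.Fatal(err)
    	}
    }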
	I0916 10:53:19.840598 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:53:19.840650 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:53:19.855502 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:53:19.855572 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
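Editor's note: once this manifest is written to /etc/kubernetes/manifests (see the scp of kube-vip.yaml below), the kubelet runs it as a static pod; the vip_leaderelection/lb_enable environment variables make kube-vip hold the 192.168.49.254 VIP and load-balance the API server across control-plane nodes. One way to sanity-check such generated YAML is to round-trip it through the typed Pod object; a sketch assuming the sigs.k8s.io/yaml and k8s.io/api modules (a hypothetical check, not a step minikube performs here):

    package main

    import (
    	"fmt"
    	"log"
    	"os"

    	corev1 "k8s.io/api/core/v1"
    	"sigs.k8s.io/yaml"
    )

    func main() {
    	raw, err := os.ReadFile("/etc/kubernetes/manifests/kube-vip.yaml")
    	if err != nil {
    		log.Fatal(err)
    	}
    	var pod corev1.Pod
    	// yaml.Unmarshal converts YAML to JSON and decodes into the typed
    	// struct, so structural and type errors surface before kubelet sees them.
    	if err := yaml.Unmarshal(raw, &pod); err != nil {
    		log.Fatalf("manifest does not parse as a Pod: %v", err)
    	}
    	fmt.Printf("pod %s/%s, image %s\n",
    		pod.Namespace, pod.Name, pod.Spec.Containers[0].Image)
    }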
	I0916 10:53:19.855644 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:53:19.865367 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:53:19.865511 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:53:19.875152 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:53:19.894518 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:53:19.914639 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:53:19.933891 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:53:19.937701 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
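Editor's note: the one-liner above makes the VIP hosts entry idempotent: filter out any existing `control-plane.minikube.internal` line, append the fresh mapping, and copy the temp file back over /etc/hosts. The same rewrite expressed in Go, assuming a writable hosts path (a local sketch of the shell pipeline, not minikube's code):

    package main

    import (
    	"log"
    	"os"
    	"strings"
    )

    // setHostEntry rewrites path so it contains exactly one line mapping
    // host to ip, mirroring the grep -v / echo / cp pipeline in the log.
    func setHostEntry(path, ip, host string) error {
    	data, err := os.ReadFile(path)
    	if err != nil {
    		return err
    	}
    	var kept []string
    	for _, line := range strings.Split(string(data), "\n") {
    		if strings.HasSuffix(line, "\t"+host) {
    			continue // drop any stale mapping for this host
    		}
    		if line != "" {
    			kept = append(kept, line)
    		}
    	}
    	kept = append(kept, ip+"\t"+host)
    	return os.WriteFile(path, []byte(strings.Join(kept, "\n")+"\n"), 0644)
    }

    func main() {
    	if err := setHostEntry("/etc/hosts", "192.168.49.254", "control-plane.minikube.internal"); err != nil {
    		log.Fatal(err)
    	}
    }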
	I0916 10:53:19.949318 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:20.046913 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:20.072657 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:53:20.073165 1415006 start.go:317] joinCluster: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:53:20.073363 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:53:20.073461 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:53:20.100714 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:53:20.274860 1415006 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:20.274906 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ksxyw6.2d1ymdt7tddb4km5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443"
	I0916 10:53:30.640812 1415006 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ksxyw6.2d1ymdt7tddb4km5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443": (10.365883142s)
	I0916 10:53:30.640843 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:53:31.125803 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765-m03 minikube.k8s.io/updated_at=2024_09_16T10_53_31_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=false
	I0916 10:53:31.396293 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-334765-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:53:31.598835 1415006 start.go:319] duration metric: took 11.525668168s to joinCluster
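Editor's note: joining an extra control-plane node is the two-step dance the log captures: ask an existing control-plane node to mint a join command (`kubeadm token create --print-join-command --ttl=0`), then run that command on the new node with `--control-plane` and the advertise address appended. A condensed sketch of the driver side, assuming each command runs on the right machine (minikube routes both through its SSH runner):

    package main

    import (
    	"fmt"
    	"log"
    	"os/exec"
    	"strings"
    )

    func main() {
    	// Step 1: on an existing control-plane node, print a reusable join command.
    	out, err := exec.Command("kubeadm", "token", "create", "--print-join-command", "--ttl=0").Output()
    	if err != nil {
    		log.Fatal(err)
    	}
    	join := strings.TrimSpace(string(out))

    	// Step 2: on the new node, run it with the control-plane flags from the log.
    	join += " --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443"
    	fmt.Println("would run:", join)
    	// exec.Command("/bin/bash", "-c", join).Run() would execute it on the new node.
    }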
	I0916 10:53:31.598894 1415006 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:31.599454 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:31.604275 1415006 out.go:177] * Verifying Kubernetes components...
	I0916 10:53:31.606980 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:31.797463 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:31.833344 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:53:31.833687 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:53:31.833751 1415006 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:53:31.834041 1415006 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m03" to be "Ready" ...
	I0916 10:53:31.834123 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:31.834129 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:31.834137 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:31.834141 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:31.837376 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:32.334572 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:32.334647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:32.334670 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:32.334689 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:32.337666 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:32.834705 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:32.834724 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:32.834734 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:32.834738 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:32.837580 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:33.334634 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:33.334707 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:33.334731 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:33.334751 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:33.338340 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:33.834288 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:33.834357 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:33.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:33.834397 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:33.837256 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:33.838513 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:34.334672 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:34.334696 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:34.334707 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:34.334711 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:34.337733 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:34.834338 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:34.834361 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:34.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:34.834402 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:34.838423 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:35.334337 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:35.334361 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:35.334370 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:35.334374 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:35.337532 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:35.834533 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:35.834556 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:35.834567 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:35.834572 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:35.837777 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:36.334925 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:36.334962 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:36.334979 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:36.334984 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:36.338421 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:36.339093 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:36.834319 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:36.834345 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:36.834355 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:36.834359 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:36.837761 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:37.335214 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:37.335242 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:37.335251 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:37.335257 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:37.338207 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:37.834909 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:37.834927 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:37.834937 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:37.834942 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:37.839069 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:38.334529 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:38.334552 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:38.334562 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:38.334566 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:38.337585 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:38.835032 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:38.835058 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:38.835068 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:38.835072 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:38.838726 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:38.839539 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:39.335389 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:39.335414 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:39.335424 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:39.335430 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:39.338809 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:39.835229 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:39.835254 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:39.835264 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:39.835268 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:39.839005 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:40.335096 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:40.335121 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:40.335131 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:40.335137 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:40.338609 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:40.835192 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:40.835217 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:40.835227 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:40.835231 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:40.838036 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:41.334291 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:41.334317 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:41.334327 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:41.334330 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:41.337742 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:41.338409 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:41.834696 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:41.834721 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:41.834731 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:41.834736 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:41.837505 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:42.334987 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:42.335014 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:42.335024 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:42.335035 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:42.338292 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:42.834327 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:42.834350 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:42.834362 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:42.834370 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:42.837711 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:43.335129 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:43.335154 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:43.335164 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:43.335169 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:43.338064 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:43.338799 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:43.834278 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:43.834303 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:43.834311 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:43.834315 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:43.837366 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:44.334790 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:44.334813 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:44.334831 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:44.334836 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:44.340073 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:44.834381 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:44.834404 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:44.834412 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:44.834416 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:44.837293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:45.334302 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:45.334337 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:45.334348 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:45.334356 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:45.338454 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:45.339100 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:45.835035 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:45.835074 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:45.835108 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:45.835114 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:45.838226 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:46.334329 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:46.334356 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:46.334365 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:46.334369 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:46.337611 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:46.835198 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:46.835227 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:46.835236 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:46.835243 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:46.839134 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:47.334400 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:47.334427 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:47.334436 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:47.334443 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:47.337505 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:47.834710 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:47.834735 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:47.834745 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:47.834749 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:47.837659 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:47.838212 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:48.335021 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:48.335043 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:48.335052 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:48.335057 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:48.337824 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:48.834207 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:48.834230 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:48.834240 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:48.834244 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:48.837316 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.334248 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:49.334275 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:49.334285 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:49.334291 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:49.338068 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.834749 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:49.834771 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:49.834781 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:49.834784 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:49.837961 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.838637 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:50.334263 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:50.334288 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:50.334298 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:50.334303 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:50.337326 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:50.834342 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:50.834369 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:50.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:50.834384 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:50.837734 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.334695 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:51.334733 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:51.334743 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:51.334748 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:51.338065 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.834867 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:51.834892 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:51.834902 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:51.834908 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:51.838148 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.838877 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:52.334577 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:52.334601 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:52.334611 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:52.334615 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:52.337642 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:52.835278 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:52.835306 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:52.835316 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:52.835321 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:52.838258 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:53.334951 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:53.334976 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:53.334992 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:53.334997 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:53.338675 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:53.834618 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:53.834648 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:53.834658 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:53.834662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:53.838336 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:53.839444 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:54.334672 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:54.334709 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:54.334720 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:54.334724 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:54.339608 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:54.835193 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:54.835228 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:54.835242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:54.835247 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:54.838653 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:55.335250 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:55.335277 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:55.335287 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:55.335291 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:55.338230 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:55.834762 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:55.834784 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:55.834797 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:55.834801 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:55.838056 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:56.334285 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:56.334308 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:56.334318 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:56.334323 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:56.337775 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:56.338362 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:56.834647 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:56.834671 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:56.834681 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:56.834685 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:56.837569 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:57.334657 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:57.334678 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:57.334691 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:57.334694 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:57.343098 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:53:57.835006 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:57.835033 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:57.835042 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:57.835049 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:57.837720 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:58.334429 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:58.334456 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:58.334465 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:58.334472 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:58.337511 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:58.834605 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:58.834628 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:58.834637 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:58.834642 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:58.837378 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:58.837903 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:59.334925 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:59.334951 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:59.334962 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:59.334968 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:59.338253 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:59.834904 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:59.834929 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:59.834939 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:59.834945 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:59.838010 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:00.335143 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:00.335205 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:00.335217 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:00.335221 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:00.345031 1415006 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:54:00.834926 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:00.834952 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:00.834962 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:00.834966 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:00.838074 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:00.839282 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:01.334380 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:01.334408 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:01.334417 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:01.334424 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:01.339710 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:54:01.834219 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:01.834247 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:01.834264 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:01.834270 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:01.837969 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:02.334202 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:02.334232 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:02.334242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:02.334247 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:02.337619 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:02.834905 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:02.834928 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:02.834938 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:02.834943 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:02.845883 1415006 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:54:02.847840 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:03.334290 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:03.334312 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:03.334322 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:03.334326 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:03.337176 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:03.834315 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:03.834339 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:03.834349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:03.834354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:03.837206 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:04.334616 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:04.334641 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:04.334651 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:04.334656 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:04.337620 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:04.834866 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:04.834896 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:04.834906 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:04.834910 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:04.837695 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:05.335209 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:05.335237 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:05.335247 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:05.335251 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:05.338244 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:05.339099 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:05.835148 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:05.835172 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:05.835181 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:05.835188 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:05.838744 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:06.334296 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:06.334319 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:06.334329 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:06.334335 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:06.337643 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:06.834660 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:06.834698 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:06.834708 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:06.834712 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:06.837310 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.334318 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:07.334341 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:07.334349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:07.334354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:07.337293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.834294 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:07.834318 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:07.834332 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:07.834336 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:07.836888 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.837784 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:08.334459 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:08.334481 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:08.334495 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:08.334502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:08.337582 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:08.834924 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:08.834949 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:08.834958 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:08.834964 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:08.837781 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.335244 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:09.335271 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.335281 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.335285 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.338282 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.834676 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:09.834700 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.834711 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.834715 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.849261 1415006 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:54:09.854065 1415006 node_ready.go:49] node "ha-334765-m03" has status "Ready":"True"
	I0916 10:54:09.854092 1415006 node_ready.go:38] duration metric: took 38.020036174s for node "ha-334765-m03" to be "Ready" ...
	I0916 10:54:09.854104 1415006 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:54:09.854175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:09.854187 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.854196 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.854200 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.867206 1415006 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:54:09.877172 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.877276 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:54:09.877289 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.877298 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.877301 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.880381 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:09.881181 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.881246 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.881269 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.881285 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.883907 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.884653 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.884746 1415006 pod_ready.go:82] duration metric: took 7.469866ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.884760 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.884831 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:54:09.884846 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.884855 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.884860 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.887917 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:09.888873 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.888895 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.888904 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.888908 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.891637 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.892242 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.892283 1415006 pod_ready.go:82] duration metric: took 7.514525ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.892321 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.892421 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:54:09.892446 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.892481 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.892502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.895188 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.896014 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.896037 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.896046 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.896051 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.898613 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.899255 1415006 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.899305 1415006 pod_ready.go:82] duration metric: took 6.957584ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.899334 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.899455 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:54:09.899482 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.899507 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.899539 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.902359 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.903536 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:09.903578 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.903616 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.903639 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.906380 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.907138 1415006 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.907159 1415006 pod_ready.go:82] duration metric: took 7.804151ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.907173 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.035614 1415006 request.go:632] Waited for 128.346187ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:54:10.035727 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:54:10.035768 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.035784 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.035791 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.039843 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:10.235249 1415006 request.go:632] Waited for 194.457282ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:10.235331 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:10.235337 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.235355 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.235375 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.238233 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:10.239108 1415006 pod_ready.go:93] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:10.239141 1415006 pod_ready.go:82] duration metric: took 331.959241ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.239179 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.435280 1415006 request.go:632] Waited for 195.986876ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:54:10.435346 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:54:10.435353 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.435380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.435410 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.438185 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:10.634844 1415006 request.go:632] Waited for 195.912753ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:10.634955 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:10.634984 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.635000 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.635005 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.638444 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:10.639291 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:10.639315 1415006 pod_ready.go:82] duration metric: took 400.119572ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.639327 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.835341 1415006 request.go:632] Waited for 195.940683ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:54:10.835478 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:54:10.835491 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.835500 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.835504 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.839135 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.035271 1415006 request.go:632] Waited for 195.339951ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:11.035412 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:11.035425 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.035437 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.035443 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.038915 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.039662 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.039683 1415006 pod_ready.go:82] duration metric: took 400.348678ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.039713 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.235305 1415006 request.go:632] Waited for 195.488895ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:54:11.235369 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:54:11.235376 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.235385 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.235393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.238269 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:11.435527 1415006 request.go:632] Waited for 196.346243ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:11.435600 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:11.435612 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.435621 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.435627 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.438790 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.439339 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.439358 1415006 pod_ready.go:82] duration metric: took 399.629697ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.439371 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.635328 1415006 request.go:632] Waited for 195.882494ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:54:11.635388 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:54:11.635394 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.635403 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.635439 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.638604 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.834747 1415006 request.go:632] Waited for 195.26705ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:11.834844 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:11.834858 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.834868 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.834872 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.838076 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.839207 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.839242 1415006 pod_ready.go:82] duration metric: took 399.858516ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.839256 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.035143 1415006 request.go:632] Waited for 195.790983ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:54:12.035255 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:54:12.035270 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.035295 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.035308 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.038799 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:12.234779 1415006 request.go:632] Waited for 195.242846ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:12.234836 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:12.234846 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.234855 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.234865 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.237789 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:12.238627 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:12.238647 1415006 pod_ready.go:82] duration metric: took 399.383731ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.238659 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.435640 1415006 request.go:632] Waited for 196.863621ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:54:12.435728 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:54:12.435741 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.435750 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.435760 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.440030 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:12.635380 1415006 request.go:632] Waited for 194.362713ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:12.635440 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:12.635446 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.635454 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.635464 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.638081 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:12.638751 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:12.638770 1415006 pod_ready.go:82] duration metric: took 400.0763ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.638782 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.834723 1415006 request.go:632] Waited for 195.809084ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:54:12.834840 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:54:12.834876 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.834893 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.834899 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.838099 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:13.035554 1415006 request.go:632] Waited for 196.329366ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:13.035628 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:13.035637 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.035647 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.035652 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.038454 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.039118 1415006 pod_ready.go:93] pod "kube-proxy-4vsvh" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.039141 1415006 pod_ready.go:82] duration metric: took 400.351329ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.039169 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.235690 1415006 request.go:632] Waited for 196.449666ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:54:13.235755 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:54:13.235764 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.235783 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.235796 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.238489 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.435561 1415006 request.go:632] Waited for 196.222948ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:13.435623 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:13.435635 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.435645 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.435662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.442733 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:54:13.443704 1415006 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.443726 1415006 pod_ready.go:82] duration metric: took 404.544453ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.443738 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.635099 1415006 request.go:632] Waited for 191.290889ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:54:13.635183 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:54:13.635194 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.635202 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.635213 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.638457 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:13.835588 1415006 request.go:632] Waited for 196.290122ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:13.835645 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:13.835655 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.835674 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.835686 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.838539 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.839403 1415006 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.839467 1415006 pod_ready.go:82] duration metric: took 395.719603ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.839486 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.034857 1415006 request.go:632] Waited for 195.30385ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:54:14.034956 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:54:14.034967 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.035001 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.035009 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.038346 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:14.235315 1415006 request.go:632] Waited for 196.258591ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:14.235402 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:14.235434 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.235450 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.235458 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.238407 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:14.239032 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:14.239059 1415006 pod_ready.go:82] duration metric: took 399.557224ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.239091 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.435529 1415006 request.go:632] Waited for 196.359683ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:54:14.435602 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:54:14.435611 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.435621 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.435628 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.438377 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:14.635344 1415006 request.go:632] Waited for 196.32153ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:14.635413 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:14.635422 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.635430 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.635440 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.638829 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:14.639457 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:14.639474 1415006 pod_ready.go:82] duration metric: took 400.371735ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.639486 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.835382 1415006 request.go:632] Waited for 195.82633ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:54:14.835464 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:54:14.835493 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.835508 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.835512 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.838577 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.034892 1415006 request.go:632] Waited for 195.597561ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:15.035037 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:15.035073 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.035096 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.035104 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.043345 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:54:15.044498 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:15.044529 1415006 pod_ready.go:82] duration metric: took 405.03117ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:15.044549 1415006 pod_ready.go:39] duration metric: took 5.190434658s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:54:15.044567 1415006 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:54:15.044658 1415006 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:54:15.065826 1415006 api_server.go:72] duration metric: took 43.466898987s to wait for apiserver process to appear ...
	I0916 10:54:15.065865 1415006 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:54:15.065890 1415006 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:54:15.081492 1415006 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:54:15.081586 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:54:15.081606 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.081617 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.081627 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.085787 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:15.085894 1415006 api_server.go:141] control plane version: v1.31.1
	I0916 10:54:15.085925 1415006 api_server.go:131] duration metric: took 20.052103ms to wait for apiserver health ...
	I0916 10:54:15.085936 1415006 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:54:15.235309 1415006 request.go:632] Waited for 149.268405ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.235368 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.235374 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.235384 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.235393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.241237 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:54:15.250252 1415006 system_pods.go:59] 24 kube-system pods found
	I0916 10:54:15.250287 1415006 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:54:15.250295 1415006 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:54:15.250299 1415006 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:54:15.250304 1415006 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:54:15.250309 1415006 system_pods.go:61] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:54:15.250313 1415006 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:54:15.250317 1415006 system_pods.go:61] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:54:15.250322 1415006 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:54:15.250326 1415006 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:54:15.250331 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:54:15.250341 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:54:15.250348 1415006 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:54:15.250357 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:54:15.250361 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:54:15.250365 1415006 system_pods.go:61] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:54:15.250369 1415006 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:54:15.250373 1415006 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:54:15.250385 1415006 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:54:15.250389 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:54:15.250393 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:54:15.250396 1415006 system_pods.go:61] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:54:15.250400 1415006 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:54:15.250404 1415006 system_pods.go:61] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:54:15.250407 1415006 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:54:15.250413 1415006 system_pods.go:74] duration metric: took 164.468004ms to wait for pod list to return data ...
	I0916 10:54:15.250420 1415006 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:54:15.435244 1415006 request.go:632] Waited for 184.740884ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:54:15.435302 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:54:15.435308 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.435317 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.435322 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.438665 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.438792 1415006 default_sa.go:45] found service account: "default"
	I0916 10:54:15.438811 1415006 default_sa.go:55] duration metric: took 188.382261ms for default service account to be created ...
	I0916 10:54:15.438821 1415006 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:54:15.635182 1415006 request.go:632] Waited for 196.288021ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.635243 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.635249 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.635259 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.635275 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.642044 1415006 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:54:15.651279 1415006 system_pods.go:86] 24 kube-system pods found
	I0916 10:54:15.651317 1415006 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:54:15.651325 1415006 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:54:15.651330 1415006 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:54:15.651336 1415006 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:54:15.651341 1415006 system_pods.go:89] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:54:15.651345 1415006 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:54:15.651350 1415006 system_pods.go:89] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:54:15.651354 1415006 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:54:15.651359 1415006 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:54:15.651369 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:54:15.651376 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:54:15.651384 1415006 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:54:15.651389 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:54:15.651393 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:54:15.651397 1415006 system_pods.go:89] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:54:15.651404 1415006 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:54:15.651408 1415006 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:54:15.651419 1415006 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:54:15.651423 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:54:15.651427 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:54:15.651437 1415006 system_pods.go:89] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:54:15.651440 1415006 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:54:15.651444 1415006 system_pods.go:89] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:54:15.651447 1415006 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:54:15.651455 1415006 system_pods.go:126] duration metric: took 212.62378ms to wait for k8s-apps to be running ...
	I0916 10:54:15.651465 1415006 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:54:15.651526 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:54:15.668882 1415006 system_svc.go:56] duration metric: took 17.401012ms WaitForService to wait for kubelet
	I0916 10:54:15.668913 1415006 kubeadm.go:582] duration metric: took 44.069992713s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:54:15.668930 1415006 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:54:15.835321 1415006 request.go:632] Waited for 166.314013ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:54:15.835384 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:54:15.835393 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.835402 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.835412 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.838719 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.839904 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839923 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839933 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839938 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839942 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839947 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839951 1415006 node_conditions.go:105] duration metric: took 171.01571ms to run NodePressure ...
	I0916 10:54:15.839962 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:54:15.839983 1415006 start.go:255] writing updated cluster config ...
	I0916 10:54:15.840309 1415006 ssh_runner.go:195] Run: rm -f paused
	I0916 10:54:15.848531 1415006 out.go:177] * Done! kubectl is now configured to use "ha-334765" cluster and "default" namespace by default
	E0916 10:54:15.851249 1415006 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
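
A note on the tail of this run: the ~500 ms GET loop against /api/v1/nodes/ha-334765-m03 flips to "Ready":"True" at 10:54:09 after 38 s, the per-pod "Ready" waits finish in ~5.2 s, and the run then probes /healthz, pgreps the apiserver, and checks the kubelet unit before the final E0916 line, which is the kubectl binary itself failing to exec ("exec format error", typically a wrong-architecture binary) rather than anything wrong with the cluster. The repeated ~195 ms "Waited ... due to client-side throttling" messages are client-go's default client-side rate limiter (QPS 5, i.e. one token per 200 ms) spacing the requests out, exactly as the message says, not API priority and fairness. Below is a minimal sketch of the node-readiness polling pattern visible above, assuming client-go, the default kubeconfig path, and the node name from the log; it is an illustration, not minikube's actual code:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// nodeReady reports whether the NodeReady condition is "True" — the same
// check node_ready.go logs above as `has status "Ready":"True"/"False"`.
func nodeReady(node *corev1.Node) bool {
	for _, c := range node.Status.Conditions {
		if c.Type == corev1.NodeReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// One GET per ~500 ms tick, capped at two minutes, mirroring the loop above.
	for i := 0; i < 240; i++ {
		node, err := client.CoreV1().Nodes().Get(context.TODO(), "ha-334765-m03", metav1.GetOptions{})
		if err == nil && nodeReady(node) {
			fmt.Println("node is Ready")
			return
		}
		time.Sleep(500 * time.Millisecond)
	}
	fmt.Println("timed out waiting for node")
}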
	
	
	==> CRI-O <==
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.519999141Z" level=info msg="Ran pod sandbox 58def518e92ee8a0fbdff450f0b2d89cb0c01f90c327d847d65637cb8734d5ef with infra container: kube-system/coredns-7c65d6cfc9-s9fp9/POD" id=b97bb72b-3885-4af7-928b-0a634327ae92 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.522707339Z" level=info msg="Ran pod sandbox 001c841a1318492ebe49fd68b6823f248801c7a32495817dbb43f485bef2bd21 with infra container: kube-system/storage-provisioner/POD" id=59aa4d27-56a5-43cb-81fe-847651495177 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.525885728Z" level=info msg="Checking image status: registry.k8s.io/coredns/coredns:v1.11.3" id=25186bff-a43f-4ae0-bdca-828d22fa3027 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526109533Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4,RepoTags:[registry.k8s.io/coredns/coredns:v1.11.3],RepoDigests:[registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6 registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e],Size_:61647114,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=25186bff-a43f-4ae0-bdca-828d22fa3027 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526333946Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=6769606b-7ada-4f83-9512-2a827d754b53 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526525621Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=6769606b-7ada-4f83-9512-2a827d754b53 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527705160Z" level=info msg="Checking image status: registry.k8s.io/coredns/coredns:v1.11.3" id=5792ef5a-f8a6-4c30-98ce-8a1d8627905f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527885176Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4,RepoTags:[registry.k8s.io/coredns/coredns:v1.11.3],RepoDigests:[registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6 registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e],Size_:61647114,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=5792ef5a-f8a6-4c30-98ce-8a1d8627905f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527977917Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=52ff5be7-7636-43cb-ac60-9ae276cfae05 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.528113946Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=52ff5be7-7636-43cb-ac60-9ae276cfae05 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529330186Z" level=info msg="Creating container: kube-system/coredns-7c65d6cfc9-s9fp9/coredns" id=279c9e3d-b080-49c2-b226-0fda1393d0ec name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529432961Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529985636Z" level=info msg="Creating container: kube-system/storage-provisioner/storage-provisioner" id=5f70674d-8771-49d2-85b5-96eb9e34fb96 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.530062664Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.568868882Z" level=warning msg="Failed to open /etc/passwd: open /var/lib/containers/storage/overlay/38e307f07a96e8cb0e3f723de2521d79276944e6b03e4c010ba60f4188b9044d/merged/etc/passwd: no such file or directory"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.569202847Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/38e307f07a96e8cb0e3f723de2521d79276944e6b03e4c010ba60f4188b9044d/merged/etc/group: no such file or directory"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.625291958Z" level=info msg="Created container e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1: kube-system/coredns-7c65d6cfc9-s9fp9/coredns" id=279c9e3d-b080-49c2-b226-0fda1393d0ec name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.626422350Z" level=info msg="Starting container: e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1" id=52fc3345-1e73-4dc6-986e-f1b6156a351f name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.643361825Z" level=info msg="Created container b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d: kube-system/coredns-7c65d6cfc9-q5xr7/coredns" id=1294853a-0d22-434b-b838-17f1e72ebfae name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.644135237Z" level=info msg="Starting container: b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d" id=41b1f1a1-a94b-41e0-8d20-44f1215c9eda name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.646585350Z" level=info msg="Created container adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd: kube-system/storage-provisioner/storage-provisioner" id=5f70674d-8771-49d2-85b5-96eb9e34fb96 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.647109808Z" level=info msg="Starting container: adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd" id=997cd3e4-00a7-4c11-8fb4-5acb1d0d1593 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.652753414Z" level=info msg="Started container" PID=2095 containerID=e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1 description=kube-system/coredns-7c65d6cfc9-s9fp9/coredns id=52fc3345-1e73-4dc6-986e-f1b6156a351f name=/runtime.v1.RuntimeService/StartContainer sandboxID=58def518e92ee8a0fbdff450f0b2d89cb0c01f90c327d847d65637cb8734d5ef
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.666247702Z" level=info msg="Started container" PID=2101 containerID=adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd description=kube-system/storage-provisioner/storage-provisioner id=997cd3e4-00a7-4c11-8fb4-5acb1d0d1593 name=/runtime.v1.RuntimeService/StartContainer sandboxID=001c841a1318492ebe49fd68b6823f248801c7a32495817dbb43f485bef2bd21
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.673868062Z" level=info msg="Started container" PID=2076 containerID=b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d description=kube-system/coredns-7c65d6cfc9-q5xr7/coredns id=41b1f1a1-a94b-41e0-8d20-44f1215c9eda name=/runtime.v1.RuntimeService/StartContainer sandboxID=fcf9d17c32966868a5889905be4475a5c0e703c6754eaf0e3a6a870b0986519b
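
The CRI-O excerpt above is the CRI RPC sequence that brings up the two coredns pods and storage-provisioner: RunPodSandbox creates each infra (pause) container, ImageStatus confirms the images are already present, then CreateContainer and StartContainer launch the workloads. The "Failed to open /etc/passwd ... /etc/group" warnings are benign; the overlay layer simply lacks those files. For reference, a hedged sketch of querying the same RuntimeService over CRI-O's gRPC socket, assuming the stock socket path and the k8s.io/cri-api client (read-only calls only — roughly what crictl ps does):

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// CRI-O's default socket; RunPodSandbox/CreateContainer/StartContainer
	// from the log above are RPCs on this same service.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	rt := runtimeapi.NewRuntimeServiceClient(conn)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	resp, err := rt.ListContainers(ctx, &runtimeapi.ListContainersRequest{})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range resp.Containers {
		fmt.Printf("%v\t%v\t%v\n", c.Id, c.State, c.Metadata.Name)
	}
}

The listing this produces corresponds to the "==> container status <==" table below.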
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	adc8f9784b1ec       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                    2 minutes ago       Running             storage-provisioner       0                   001c841a13184       storage-provisioner
	e1bb424abe078       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                    2 minutes ago       Running             coredns                   0                   58def518e92ee       coredns-7c65d6cfc9-s9fp9
	b39228db2d4d8       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                    2 minutes ago       Running             coredns                   0                   fcf9d17c32966       coredns-7c65d6cfc9-q5xr7
	4e367c2e592f9       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                    3 minutes ago       Running             kindnet-cni               0                   1e25f226ecf5a       kindnet-7s5t5
	e1979b8578120       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                    3 minutes ago       Running             kube-proxy                0                   facacbf959961       kube-proxy-tlfs7
	8dfb86491b77a       ghcr.io/kube-vip/kube-vip@sha256:360f0c5d02322075cc80edb9e4e0d2171e941e55072184f1f902203fafc81d0f   3 minutes ago       Running             kube-vip                  0                   afc2dcca571a8       kube-vip-ha-334765
	942911c4142a5       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                    3 minutes ago       Running             kube-controller-manager   0                   a2380692f5f7e       kube-controller-manager-ha-334765
	42f82617ee823       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                    3 minutes ago       Running             kube-apiserver            0                   4b932f715d94f       kube-apiserver-ha-334765
	04fb33f068e50       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                    3 minutes ago       Running             kube-scheduler            0                   9743c9508108d       kube-scheduler-ha-334765
	87df03de85a8a       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                    3 minutes ago       Running             etcd                      0                   eb6b8de72187f       etcd-ha-334765
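
Note: the container listing above is crictl output captured from the node. To reproduce it against this profile (name taken from the logs above), a command along these lines should work:

	minikube -p ha-334765 ssh -- sudo crictl ps -a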
	
	
	==> coredns [b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d] <==
	[INFO] 10.244.2.2:60718 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001988979s
	[INFO] 10.244.2.2:60533 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00017378s
	[INFO] 10.244.2.3:49282 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001552216s
	[INFO] 10.244.2.3:43574 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.00119709s
	[INFO] 10.244.2.3:56190 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.00012963s
	[INFO] 10.244.2.3:34905 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000217717s
	[INFO] 10.244.1.2:51860 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000199527s
	[INFO] 10.244.1.2:35957 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000152029s
	[INFO] 10.244.1.2:55907 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000096942s
	[INFO] 10.244.1.2:50978 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000068675s
	[INFO] 10.244.1.2:51125 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000129671s
	[INFO] 10.244.2.2:60214 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000138237s
	[INFO] 10.244.2.2:45302 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000086324s
	[INFO] 10.244.2.2:45998 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000103053s
	[INFO] 10.244.2.3:42017 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000096736s
	[INFO] 10.244.2.3:35753 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000071342s
	[INFO] 10.244.1.2:40346 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000096064s
	[INFO] 10.244.1.2:40215 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000063826s
	[INFO] 10.244.1.2:43106 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000085905s
	[INFO] 10.244.2.2:45784 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000096999s
	[INFO] 10.244.2.3:36002 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000099427s
	[INFO] 10.244.2.3:38212 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000105285s
	[INFO] 10.244.2.3:45229 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000091272s
	[INFO] 10.244.2.3:44135 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000091395s
	[INFO] 10.244.1.2:42055 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000083707s
	
	
	==> coredns [e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1] <==
	[INFO] 10.244.2.2:44124 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 60 0.002159321s
	[INFO] 10.244.2.2:58166 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.086819453s
	[INFO] 10.244.1.2:43380 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000269975s
	[INFO] 10.244.1.2:50566 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd,ra 31 0.000084445s
	[INFO] 10.244.2.2:60735 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000146342s
	[INFO] 10.244.2.2:55629 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.003742478s
	[INFO] 10.244.2.2:58855 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000236597s
	[INFO] 10.244.2.2:49910 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000148123s
	[INFO] 10.244.2.3:57067 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000182543s
	[INFO] 10.244.2.3:59470 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000231354s
	[INFO] 10.244.2.3:35175 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000359483s
	[INFO] 10.244.2.3:58611 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00017314s
	[INFO] 10.244.1.2:47364 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001674788s
	[INFO] 10.244.1.2:34355 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000164033s
	[INFO] 10.244.1.2:39537 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001387132s
	[INFO] 10.244.2.2:57450 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150922s
	[INFO] 10.244.2.3:47756 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000109207s
	[INFO] 10.244.2.3:49471 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000084264s
	[INFO] 10.244.1.2:51431 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000153063s
	[INFO] 10.244.2.2:35440 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000221902s
	[INFO] 10.244.2.2:38547 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000131312s
	[INFO] 10.244.2.2:58642 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.00016226s
	[INFO] 10.244.1.2:48380 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000230402s
	[INFO] 10.244.1.2:40950 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000091698s
	[INFO] 10.244.1.2:41025 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000084191s
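
Note: each coredns line records the client address, query type and name, protocol, response code, and latency. The NXDOMAIN answers for bare names such as kubernetes.default. are expected, since only the search-path expansion kubernetes.default.svc.cluster.local resolves, as the adjacent NOERROR lines show. The same logs can be pulled directly, using the pod names from the container listing above:

	kubectl --context ha-334765 -n kube-system logs coredns-7c65d6cfc9-s9fp9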
	
	
	==> describe nodes <==
	Name:               ha-334765
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:30 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:52:40 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-334765
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 87dc8482c7ba4d99a2731913dc3dcad5
	  System UUID:                15c23ccf-7aa3-4a1a-8aeb-2a833bffb1e5
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-q5xr7             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m35s
	  kube-system                 coredns-7c65d6cfc9-s9fp9             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m35s
	  kube-system                 etcd-ha-334765                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m38s
	  kube-system                 kindnet-7s5t5                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m35s
	  kube-system                 kube-apiserver-ha-334765             250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m38s
	  kube-system                 kube-controller-manager-ha-334765    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m38s
	  kube-system                 kube-proxy-tlfs7                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m35s
	  kube-system                 kube-scheduler-ha-334765             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m38s
	  kube-system                 kube-vip-ha-334765                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m38s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m33s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m34s                  kube-proxy       
	  Normal   NodeHasNoDiskPressure    3m48s (x8 over 3m48s)  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m48s (x7 over 3m48s)  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m48s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m48s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m48s (x8 over 3m48s)  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   Starting                 3m38s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m38s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m38s                  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m38s                  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m38s                  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           3m36s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           3m4s                   node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   NodeReady                2m53s                  kubelet          Node ha-334765 status is now: NodeReady
	  Normal   RegisteredNode           116s                   node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	
	
	Name:               ha-334765-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:19 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:33 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:54:52 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:54:52 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:54:52 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:54:52 +0000   Mon, 16 Sep 2024 10:53:02 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-334765-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 8714038823364065b7fb0ea0ea167837
	  System UUID:                aea91ea0-3fb3-4815-9747-a2bcb9506f24
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-tczms                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         77s
	  kube-system                 etcd-ha-334765-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m14s
	  kube-system                 kindnet-vj27j                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m15s
	  kube-system                 kube-apiserver-ha-334765-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m14s
	  kube-system                 kube-controller-manager-ha-334765-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m14s
	  kube-system                 kube-proxy-l998t                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m15s
	  kube-system                 kube-scheduler-ha-334765-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m14s
	  kube-system                 kube-vip-ha-334765-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m11s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 3m7s                   kube-proxy       
	  Normal  NodeHasSufficientMemory  3m15s (x8 over 3m15s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    3m15s (x8 over 3m15s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     3m15s (x7 over 3m15s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           3m12s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal  RegisteredNode           3m5s                   node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal  RegisteredNode           117s                   node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	
	
	Name:               ha-334765-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_53_31_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:53:26 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:28 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:54:09 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.4
	  Hostname:    ha-334765-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d5114246fab6471585539c19d438fdb6
	  System UUID:                f0fee577-b975-499e-b5ad-667fb8443848
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-mbfkp                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         78s
	  default                     busybox-7dff88458-mh2kc                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         78s
	  kube-system                 etcd-ha-334765-m03                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m6s
	  kube-system                 kindnet-rfw69                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m8s
	  kube-system                 kube-apiserver-ha-334765-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m6s
	  kube-system                 kube-controller-manager-ha-334765-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m4s
	  kube-system                 kube-proxy-4vsvh                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m8s
	  kube-system                 kube-scheduler-ha-334765-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m2s
	  kube-system                 kube-vip-ha-334765-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m4s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                  From             Message
	  ----    ------                   ----                 ----             -------
	  Normal  Starting                 2m                   kube-proxy       
	  Normal  NodeHasSufficientMemory  2m8s (x8 over 2m8s)  kubelet          Node ha-334765-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m8s (x8 over 2m8s)  kubelet          Node ha-334765-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m8s (x7 over 2m8s)  kubelet          Node ha-334765-m03 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           2m7s                 node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	  Normal  RegisteredNode           2m4s                 node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	  Normal  RegisteredNode           117s                 node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	
	
	Name:               ha-334765-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_43_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:34 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:55:25 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-334765-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 4dd286f9ed0c4b2ea7ffee02c4c1d337
	  System UUID:                2ce236e7-eff0-4b96-a330-3e2c709a50e7
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-plxdg       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      52s
	  kube-system                 kube-proxy-br496    0 (0%)        0 (0%)      0 (0%)           0 (0%)         52s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 49s                kube-proxy       
	  Normal   RegisteredNode           52s                node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Warning  CgroupV1                 52s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  52s (x2 over 52s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    52s (x2 over 52s)  kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     52s (x2 over 52s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           49s                node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           47s                node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeReady                9s                 kubelet          Node ha-334765-m04 status is now: NodeReady
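
Note: the four node descriptions above come from kubectl describe; each control-plane node (ha-334765, -m02, -m03) carries the node-role.kubernetes.io/control-plane label and its own pod CIDR, while -m04 is a plain worker that only became Ready 9s before capture. A single node's view can be regenerated with:

	kubectl --context ha-334765 describe node ha-334765-m04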
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [87df03de85a8a5a35eeb988107885097dc0af1971560276e2869960289fc36f0] <==
	{"level":"info","ts":"2024-09-16T10:53:27.068286Z","caller":"rafthttp/pipeline.go:72","msg":"started HTTP pipelining with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.068580Z","caller":"rafthttp/peer.go:137","msg":"started remote peer","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.068654Z","caller":"rafthttp/transport.go:317","msg":"added remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb","remote-peer-urls":["https://192.168.49.4:2380"]}
	{"level":"info","ts":"2024-09-16T10:53:27.068741Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddLearnerNode","raft-conf-change-node-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.068972Z","caller":"rafthttp/stream.go:169","msg":"started stream writer with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.069041Z","caller":"rafthttp/stream.go:169","msg":"started stream writer with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.069076Z","caller":"rafthttp/stream.go:395","msg":"started stream reader with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:27.069337Z","caller":"rafthttp/stream.go:395","msg":"started stream reader with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:53:27.099349Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.4:2380/version","remote-member-id":"e131de7e5408ffcb","error":"Get \"https://192.168.49.4:2380/version\": dial tcp 192.168.49.4:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:53:27.099512Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"e131de7e5408ffcb","error":"Get \"https://192.168.49.4:2380/version\": dial tcp 192.168.49.4:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:53:27.116171Z","caller":"embed/config_logging.go:170","msg":"rejected connection on client endpoint","remote-addr":"192.168.49.4:60656","server-name":"","error":"read tcp 192.168.49.2:2379->192.168.49.4:60656: read: connection reset by peer"}
	{"level":"warn","ts":"2024-09-16T10:53:27.706186Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"e131de7e5408ffcb","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"warn","ts":"2024-09-16T10:53:28.693088Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"e131de7e5408ffcb","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2024-09-16T10:53:28.831019Z","caller":"rafthttp/peer_status.go:53","msg":"peer became active","peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:28.884927Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:28.900059Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:28.958909Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"e131de7e5408ffcb","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:53:28.958952Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:53:28.988870Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"e131de7e5408ffcb","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:53:28.988912Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:53:29.291184Z","caller":"embed/config_logging.go:170","msg":"rejected connection on peer endpoint","remote-addr":"192.168.49.4:45614","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T10:53:29.705265Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"e131de7e5408ffcb","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2024-09-16T10:53:30.192625Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(2225418148823231912 12593026477526642892 16226995566549729227)"}
	{"level":"info","ts":"2024-09-16T10:53:30.193020Z","caller":"membership/cluster.go:535","msg":"promote member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:53:30.193063Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddNode","raft-conf-change-node-id":"e131de7e5408ffcb"}
	
	
	==> kernel <==
	 10:55:34 up 10:37,  0 users,  load average: 1.55, 1.87, 1.81
	Linux ha-334765 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [4e367c2e592f9aea5b6f808fe6b2f319782cc7f486aef441ea9eccd8a2234ceb] <==
	I0916 10:54:59.622552       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:09.628788       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:09.628826       1 main.go:299] handling current node
	I0916 10:55:09.628843       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:09.628856       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:09.628968       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:09.628981       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:09.629024       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:09.629045       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:19.622889       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:19.622932       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:19.623056       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:19.623067       1 main.go:299] handling current node
	I0916 10:55:19.623079       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:19.623090       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:19.623136       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:19.623149       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:29.625249       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:29.625289       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:29.625440       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:29.625455       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:29.625500       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:29.625512       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:29.625558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:29.625570       1 main.go:299] handling current node
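
Note: kindnet's loop above walks every node on each pass and keeps a route to that node's pod CIDR via its InternalIP, which is why each node is re-handled with its 10.244.x.0/24 range roughly every ten seconds. The resulting routes should be visible on any node:

	minikube -p ha-334765 ssh -- ip route | grep 10.244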
	
	
	==> kube-apiserver [42f82617ee823d573b9d4f28daf99d7e25b6909d4243e3187869a02bdce9fdff] <==
	I0916 10:51:52.062598       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:51:52.734853       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:51:52.790464       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:52.897833       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:51:52.905594       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:51:52.906850       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:52.912174       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:53.112482       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:51:55.796509       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:51:55.839773       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:51:55.858150       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:51:58.517329       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0916 10:51:58.779036       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0916 10:54:24.784781       1 watch.go:250] "Unhandled Error" err="http2: stream closed" logger="UnhandledError"
	E0916 10:54:25.822674       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:41276: use of closed network connection
	E0916 10:54:26.081024       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56708: use of closed network connection
	E0916 10:54:26.313044       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56730: use of closed network connection
	E0916 10:54:26.827467       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56778: use of closed network connection
	E0916 10:54:27.093710       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56798: use of closed network connection
	E0916 10:54:27.327051       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56820: use of closed network connection
	E0916 10:54:27.554546       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56844: use of closed network connection
	E0916 10:54:27.987173       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56870: use of closed network connection
	E0916 10:54:28.225420       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56880: use of closed network connection
	E0916 10:54:28.678732       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56908: use of closed network connection
	E0916 10:54:28.913617       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56920: use of closed network connection
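
Note: the "use of closed network connection" errors are all reads against 192.168.49.254:8443, the kube-vip virtual IP seen in the pod list above; they line up with test clients dropping connections around 10:54 and are typically harmless. The VIP can be probed from a node (it is generally not routable from the host):

	minikube -p ha-334765 ssh -- curl -ks https://192.168.49.254:8443/healthz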
	
	
	==> kube-controller-manager [942911c4142a59d4fc2b1d92ba267126bbdb629387bc6b3fa725bafa1a1d00d1] <==
	I0916 10:54:22.667010       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="59.827546ms"
	I0916 10:54:22.667173       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="39.942µs"
	I0916 10:54:24.834250       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="71.659115ms"
	I0916 10:54:24.834771       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="33.025µs"
	I0916 10:54:27.680828       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m03"
	E0916 10:54:42.064745       1 certificate_controller.go:151] "Unhandled Error" err="Sync csr-xwk47 failed with : error updating signature for csr: Operation cannot be fulfilled on certificatesigningrequests.certificates.k8s.io \"csr-xwk47\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:54:42.353962       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"ha-334765-m04\" does not exist"
	I0916 10:54:42.406194       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="ha-334765-m04" podCIDRs=["10.244.3.0/24"]
	I0916 10:54:42.406304       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.406358       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.422131       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.779654       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.858731       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-334765-m04"
	I0916 10:54:42.947669       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:43.315307       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:45.387280       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:45.427214       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:47.156328       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:47.224629       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:52.113885       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m02"
	I0916 10:54:52.775659       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.183913       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.184407       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-334765-m04"
	I0916 10:55:25.213152       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.412910       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
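
Note: the controller-manager entries trace ha-334765-m04 joining at 10:54:42: a benign, retried CSR signing conflict, the node-ipam-controller assigning 10.244.3.0/24, and the node-lifecycle controller seeding the node's first heartbeat timestamp. The per-node CIDR assignments can be confirmed with:

	kubectl --context ha-334765 get nodes -o custom-columns=NAME:.metadata.name,CIDR:.spec.podCIDR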
	
	
	==> kube-proxy [e1979b857812014745feb8baa7c2bc7b3750644c2185150532d37f3bf6389742] <==
	I0916 10:51:59.283351       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:51:59.600524       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:51:59.600625       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:51:59.738471       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:51:59.776540       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:51:59.805947       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:51:59.806378       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:51:59.806546       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:51:59.807688       1 config.go:199] "Starting service config controller"
	I0916 10:51:59.807772       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:51:59.807831       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:51:59.807860       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:51:59.808420       1 config.go:328] "Starting node config controller"
	I0916 10:51:59.837473       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:51:59.908782       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:51:59.908839       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:51:59.941440       1 shared_informer.go:320] Caches are synced for node config
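
Note: kube-proxy here runs the iptables proxier in dual-stack mode with IPv4 primary; the nodePortAddresses warning only means NodePort services accept connections on all local IPs. The generated service rules land in the nat table and can be spot-checked with:

	minikube -p ha-334765 ssh -- sudo iptables -t nat -L KUBE-SERVICES -n | head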
	
	
	==> kube-scheduler [04fb33f068e50df557ad2766e0b9f19ce855957bef36f74835dedc91014730a4] <==
	E0916 10:52:40.159246       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"coredns-7c65d6cfc9-s9fp9\": pod coredns-7c65d6cfc9-s9fp9 is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="kube-system/coredns-7c65d6cfc9-s9fp9" node="ha-334765"
	E0916 10:52:40.159345       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 0e29200a-0909-47e1-8521-bf5f9b645d6c(kube-system/coredns-7c65d6cfc9-s9fp9) wasn't assumed so cannot be forgotten" pod="kube-system/coredns-7c65d6cfc9-s9fp9"
	E0916 10:52:40.159399       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"coredns-7c65d6cfc9-s9fp9\": pod coredns-7c65d6cfc9-s9fp9 is already assigned to node \"ha-334765\"" pod="kube-system/coredns-7c65d6cfc9-s9fp9"
	I0916 10:52:40.159422       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/coredns-7c65d6cfc9-s9fp9" node="ha-334765"
	E0916 10:52:40.163345       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"storage-provisioner\": pod storage-provisioner is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="kube-system/storage-provisioner" node="ha-334765"
	E0916 10:52:40.163500       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"storage-provisioner\": pod storage-provisioner is already assigned to node \"ha-334765\"" pod="kube-system/storage-provisioner"
	E0916 10:53:26.592667       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-rfw69\": pod kindnet-rfw69 is already assigned to node \"ha-334765-m03\"" plugin="DefaultBinder" pod="kube-system/kindnet-rfw69" node="ha-334765-m03"
	E0916 10:53:26.592854       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 396f204a-53ea-4720-85fc-05ba54d285ca(kube-system/kindnet-rfw69) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-rfw69"
	E0916 10:53:26.592899       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-rfw69\": pod kindnet-rfw69 is already assigned to node \"ha-334765-m03\"" pod="kube-system/kindnet-rfw69"
	I0916 10:53:26.592943       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-rfw69" node="ha-334765-m03"
	I0916 10:54:17.119262       1 cache.go:503] "Pod was added to a different node than it was assumed" podKey="d1ebb37a-bf5c-499a-b26a-5fb9e3076c6a" pod="default/busybox-7dff88458-mh2kc" assumedNode="ha-334765-m03" currentNode="ha-334765-m02"
	E0916 10:54:17.160604       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-mh2kc\": pod busybox-7dff88458-mh2kc is already assigned to node \"ha-334765-m03\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-mh2kc" node="ha-334765-m02"
	E0916 10:54:17.160660       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod d1ebb37a-bf5c-499a-b26a-5fb9e3076c6a(default/busybox-7dff88458-mh2kc) was assumed on ha-334765-m02 but assigned to ha-334765-m03" pod="default/busybox-7dff88458-mh2kc"
	E0916 10:54:17.161445       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-mh2kc\": pod busybox-7dff88458-mh2kc is already assigned to node \"ha-334765-m03\"" pod="default/busybox-7dff88458-mh2kc"
	I0916 10:54:17.161498       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-mh2kc" node="ha-334765-m03"
	E0916 10:54:17.196982       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-jnlww\": pod busybox-7dff88458-jnlww is already assigned to node \"ha-334765-m02\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-jnlww" node="ha-334765-m02"
	E0916 10:54:17.197090       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-jnlww\": pod busybox-7dff88458-jnlww is already assigned to node \"ha-334765-m02\"" pod="default/busybox-7dff88458-jnlww"
	E0916 10:54:17.197808       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-89hpp\": pod busybox-7dff88458-89hpp is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-89hpp" node="ha-334765"
	E0916 10:54:17.197870       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-89hpp\": pod busybox-7dff88458-89hpp is already assigned to node \"ha-334765\"" pod="default/busybox-7dff88458-89hpp"
	E0916 10:54:42.504006       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-br496\": pod kube-proxy-br496 is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-br496" node="ha-334765-m04"
	E0916 10:54:42.504170       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-br496\": pod kube-proxy-br496 is already assigned to node \"ha-334765-m04\"" pod="kube-system/kube-proxy-br496"
	E0916 10:54:42.565201       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-plxdg\": pod kindnet-plxdg is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-plxdg" node="ha-334765-m04"
	E0916 10:54:42.565280       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-plxdg\": pod kindnet-plxdg is already assigned to node \"ha-334765-m04\"" pod="kube-system/kindnet-plxdg"
	E0916 10:54:42.730269       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-4tn75\": pod kube-proxy-4tn75 is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-4tn75" node="ha-334765-m04"
	E0916 10:54:42.730443       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-4tn75\": pod kube-proxy-4tn75 is already assigned to node \"ha-334765-m04\"" pod="kube-system/kube-proxy-4tn75"
	
	
	==> kubelet <==
	Sep 16 10:54:05 ha-334765 kubelet[1577]: E0916 10:54:05.852038    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484045851828805,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:05 ha-334765 kubelet[1577]: E0916 10:54:05.852072    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484045851828805,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:15 ha-334765 kubelet[1577]: E0916 10:54:15.853773    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484055853338885,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:15 ha-334765 kubelet[1577]: E0916 10:54:15.853809    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484055853338885,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:17 ha-334765 kubelet[1577]: I0916 10:54:17.215469    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-q5xr7" podStartSLOduration=139.21544987 podStartE2EDuration="2m19.21544987s" podCreationTimestamp="2024-09-16 10:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:52:41.084484448 +0000 UTC m=+45.487365406" watchObservedRunningTime="2024-09-16 10:54:17.21544987 +0000 UTC m=+141.618330812"
	Sep 16 10:54:17 ha-334765 kubelet[1577]: I0916 10:54:17.337108    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rls85\" (UniqueName: \"kubernetes.io/projected/3ae886b8-49b8-4ed7-ae5f-98b630edc5b6-kube-api-access-rls85\") pod \"busybox-7dff88458-89hpp\" (UID: \"3ae886b8-49b8-4ed7-ae5f-98b630edc5b6\") " pod="default/busybox-7dff88458-89hpp"
	Sep 16 10:54:17 ha-334765 kubelet[1577]: E0916 10:54:17.422790    1577 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-rls85], unattached volumes=[], failed to process volumes=[]: context canceled" pod="default/busybox-7dff88458-89hpp" podUID="3ae886b8-49b8-4ed7-ae5f-98b630edc5b6"
	Sep 16 10:54:18 ha-334765 kubelet[1577]: I0916 10:54:18.247316    1577 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rls85\" (UniqueName: \"kubernetes.io/projected/3ae886b8-49b8-4ed7-ae5f-98b630edc5b6-kube-api-access-rls85\") pod \"3ae886b8-49b8-4ed7-ae5f-98b630edc5b6\" (UID: \"3ae886b8-49b8-4ed7-ae5f-98b630edc5b6\") "
	Sep 16 10:54:18 ha-334765 kubelet[1577]: I0916 10:54:18.254264    1577 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ae886b8-49b8-4ed7-ae5f-98b630edc5b6-kube-api-access-rls85" (OuterVolumeSpecName: "kube-api-access-rls85") pod "3ae886b8-49b8-4ed7-ae5f-98b630edc5b6" (UID: "3ae886b8-49b8-4ed7-ae5f-98b630edc5b6"). InnerVolumeSpecName "kube-api-access-rls85". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:54:18 ha-334765 kubelet[1577]: I0916 10:54:18.348022    1577 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-rls85\" (UniqueName: \"kubernetes.io/projected/3ae886b8-49b8-4ed7-ae5f-98b630edc5b6-kube-api-access-rls85\") on node \"ha-334765\" DevicePath \"\""
	Sep 16 10:54:19 ha-334765 kubelet[1577]: I0916 10:54:19.750873    1577 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ae886b8-49b8-4ed7-ae5f-98b630edc5b6" path="/var/lib/kubelet/pods/3ae886b8-49b8-4ed7-ae5f-98b630edc5b6/volumes"
	Sep 16 10:54:25 ha-334765 kubelet[1577]: E0916 10:54:25.855441    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484065855269170,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:25 ha-334765 kubelet[1577]: E0916 10:54:25.855479    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484065855269170,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:35 ha-334765 kubelet[1577]: E0916 10:54:35.857824    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484075857635600,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:35 ha-334765 kubelet[1577]: E0916 10:54:35.857867    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484075857635600,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:45 ha-334765 kubelet[1577]: E0916 10:54:45.858881    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484085858695976,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:45 ha-334765 kubelet[1577]: E0916 10:54:45.858916    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484085858695976,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:55 ha-334765 kubelet[1577]: E0916 10:54:55.869218    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484095868808670,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:55 ha-334765 kubelet[1577]: E0916 10:54:55.869262    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484095868808670,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:05 ha-334765 kubelet[1577]: E0916 10:55:05.870908    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484105870695517,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:05 ha-334765 kubelet[1577]: E0916 10:55:05.870946    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484105870695517,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:15 ha-334765 kubelet[1577]: E0916 10:55:15.872457    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484115872243215,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:15 ha-334765 kubelet[1577]: E0916 10:55:15.872498    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484115872243215,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:25 ha-334765 kubelet[1577]: E0916 10:55:25.873717    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484125873489809,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:25 ha-334765 kubelet[1577]: E0916 10:55:25.873757    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484125873489809,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

                                                
                                                
-- /stdout --
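Two patterns in the captured logs above are recurring noise rather than the cause of this failure. The kube-scheduler's "already assigned to node" bind rejections are the usual symptom of an optimistic-binding race (plausibly around leader-election handoff between the three control-plane schedulers in this HA profile): the cache assumes one node, the API server reports the pod already bound elsewhere, and the scheduler drops it from the queue, as its own "Pod has been assigned to node. Abort adding it back to queue." line shows. The kubelet's repeating "missing image stats" eviction-manager errors reflect that cri-o's ImageFsInfo response carries an empty ContainerFilesystems list, so HasDedicatedImageFs cannot be computed. A minimal sketch to inspect what the runtime actually reports, assuming crictl is present in the node image as it normally is in minikube's kicbase:

	$ minikube -p ha-334765 ssh      # open a shell on the control-plane node
	$ sudo crictl imagefsinfo        # print the image-filesystem stats cri-o hands to kubelet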
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-334765 -n ha-334765
helpers_test.go:261: (dbg) Run:  kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (466.228µs)
helpers_test.go:263: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/NodeLabels (3.15s)
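Note that the NodeLabels assertion itself never ran: every kubectl invocation in this report dies with "fork/exec /usr/local/bin/kubectl: exec format error", which means the kernel refused to execute the binary at all. On this aarch64 runner (see the hostinfo in the start logs below) that almost always indicates an amd64 kubectl was installed at /usr/local/bin/kubectl. A minimal check, where the expected output is an assumption for an aarch64 host:

	$ file /usr/local/bin/kubectl   # should report an ELF 64-bit ARM aarch64 executable here
	$ uname -m                      # aarch64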

                                                
                                    
x
+
TestMultiControlPlane/serial/RestartSecondaryNode (29.54s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartSecondaryNode
ha_test.go:420: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 node start m02 -v=7 --alsologtostderr
ha_test.go:420: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 node start m02 -v=7 --alsologtostderr: (23.397094046s)
ha_test.go:428: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:428: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr: (1.380551292s)
ha_test.go:448: (dbg) Run:  kubectl get nodes
ha_test.go:448: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (653.374µs)
ha_test.go:450: failed to kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/RestartSecondaryNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-334765
helpers_test.go:235: (dbg) docker inspect ha-334765:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5",
	        "Created": "2024-09-16T10:51:30.912390622Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1415494,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:51:31.080722061Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hostname",
	        "HostsPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hosts",
	        "LogPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5-json.log",
	        "Name": "/ha-334765",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "ha-334765:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-334765",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/merged",
	                "UpperDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/diff",
	                "WorkDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "ha-334765",
	                "Source": "/var/lib/docker/volumes/ha-334765/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-334765",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-334765",
	                "name.minikube.sigs.k8s.io": "ha-334765",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "12d5a1b21703aeee0dc587f61286e8eedd5eadc5d72d885400108c3582ba6858",
	            "SandboxKey": "/var/run/docker/netns/12d5a1b21703",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34618"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34619"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34622"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34620"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34621"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-334765": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "a49e1846148d74f15aa5bd587e5d2d6b8a3c4246e7c45cf081cf9063a160d645",
	                    "EndpointID": "698461d25faa71c3e4175824b7994fac5706f91d7d306412be9930c5e2592d23",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-334765",
	                        "471d2d625f18"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-334765 -n ha-334765
helpers_test.go:244: <<< TestMultiControlPlane/serial/RestartSecondaryNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/RestartSecondaryNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 logs -n 25: (3.033102076s)
helpers_test.go:252: TestMultiControlPlane/serial/RestartSecondaryNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765:/home/docker/cp-test_ha-334765-m03_ha-334765.txt                       |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765 sudo cat                                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765.txt                                 |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m02:/home/docker/cp-test_ha-334765-m03_ha-334765-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m02 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m04 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp testdata/cp-test.txt                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765:/home/docker/cp-test_ha-334765-m04_ha-334765.txt                       |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765 sudo cat                                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765.txt                                 |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m02:/home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m02 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03:/home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m03 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-334765 node stop m02 -v=7                                                     | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-334765 node start m02 -v=7                                                    | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:25
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:25.456983 1415006 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:25.457102 1415006 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:25.457115 1415006 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:25.457121 1415006 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:25.457390 1415006 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:25.457821 1415006 out.go:352] Setting JSON to false
	I0916 10:51:25.458668 1415006 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38031,"bootTime":1726445855,"procs":154,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:25.458740 1415006 start.go:139] virtualization:  
	I0916 10:51:25.462420 1415006 out.go:177] * [ha-334765] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:25.466540 1415006 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:25.466681 1415006 notify.go:220] Checking for updates...
	I0916 10:51:25.472214 1415006 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:25.474945 1415006 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:25.477743 1415006 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:25.480408 1415006 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:25.483253 1415006 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:25.486288 1415006 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:25.516837 1415006 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:25.516993 1415006 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:25.574760 1415006 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:51:25.563814697 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:25.574877 1415006 docker.go:318] overlay module found
	I0916 10:51:25.577720 1415006 out.go:177] * Using the docker driver based on user configuration
	I0916 10:51:25.580504 1415006 start.go:297] selected driver: docker
	I0916 10:51:25.580528 1415006 start.go:901] validating driver "docker" against <nil>
	I0916 10:51:25.580545 1415006 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:25.581296 1415006 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:25.647223 1415006 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:51:25.637994422 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:25.647541 1415006 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:51:25.647914 1415006 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:51:25.650821 1415006 out.go:177] * Using Docker driver with root privileges
	I0916 10:51:25.653558 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:25.653636 1415006 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 10:51:25.653650 1415006 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:51:25.653740 1415006 start.go:340] cluster config:
	{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:25.658763 1415006 out.go:177] * Starting "ha-334765" primary control-plane node in "ha-334765" cluster
	I0916 10:51:25.661650 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:51:25.664257 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:25.666746 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:25.666809 1415006 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:51:25.666820 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:25.666836 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:25.666937 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:25.666948 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:51:25.667345 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:51:25.667377 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json: {Name:mk57f344adf6e8ac17121e88734a44d2f855cf4f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 10:51:25.695836 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:25.695859 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:25.695952 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:25.695986 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:25.695996 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:25.696005 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:25.696010 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:25.697318 1415006 image.go:273] response: 
	I0916 10:51:25.821505 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:25.821546 1415006 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:25.821578 1415006 start.go:360] acquireMachinesLock for ha-334765: {Name:mk63c1424907d32e4e30c00d74a2bae6eec53e1d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:25.821700 1415006 start.go:364] duration metric: took 99.698µs to acquireMachinesLock for "ha-334765"
	I0916 10:51:25.821736 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:51:25.821822 1415006 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:51:25.825119 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:25.825384 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:51:25.825424 1415006 client.go:168] LocalClient.Create starting
	I0916 10:51:25.825513 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:51:25.825559 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:25.825580 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:25.825637 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:51:25.825660 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:25.825671 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:25.826062 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:51:25.841603 1415006 cli_runner.go:211] docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:51:25.841687 1415006 network_create.go:284] running [docker network inspect ha-334765] to gather additional debugging logs...
	I0916 10:51:25.841709 1415006 cli_runner.go:164] Run: docker network inspect ha-334765
	W0916 10:51:25.856558 1415006 cli_runner.go:211] docker network inspect ha-334765 returned with exit code 1
	I0916 10:51:25.856595 1415006 network_create.go:287] error running [docker network inspect ha-334765]: docker network inspect ha-334765: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network ha-334765 not found
	I0916 10:51:25.856620 1415006 network_create.go:289] output of [docker network inspect ha-334765]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network ha-334765 not found
	
	** /stderr **
	I0916 10:51:25.856747 1415006 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:25.873027 1415006 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400181cbc0}
	I0916 10:51:25.873072 1415006 network_create.go:124] attempt to create docker network ha-334765 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:51:25.873140 1415006 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=ha-334765 ha-334765
	I0916 10:51:25.941986 1415006 network_create.go:108] docker network ha-334765 192.168.49.0/24 created
	I0916 10:51:25.942025 1415006 kic.go:121] calculated static IP "192.168.49.2" for the "ha-334765" container
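
The subnet scan above walks candidate private ranges until `docker network inspect` shows one free, then creates a bridge whose gateway is the first host address and pins the node to the second. A minimal sketch of that address arithmetic with Go's net/netip (an illustration, not minikube's actual code):

package main

import (
	"fmt"
	"net/netip"
)

func main() {
	// Subnet the log reports as free.
	prefix := netip.MustParsePrefix("192.168.49.0/24")
	gateway := prefix.Addr().Next() // 192.168.49.1 -> docker network --gateway
	nodeIP := gateway.Next()        // 192.168.49.2 -> static IP for ha-334765
	fmt.Println(prefix, gateway, nodeIP)
}
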
	I0916 10:51:25.942101 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:25.957289 1415006 cli_runner.go:164] Run: docker volume create ha-334765 --label name.minikube.sigs.k8s.io=ha-334765 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:25.973680 1415006 oci.go:103] Successfully created a docker volume ha-334765
	I0916 10:51:25.973783 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765 --entrypoint /usr/bin/test -v ha-334765:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:26.603240 1415006 oci.go:107] Successfully prepared a docker volume ha-334765
	I0916 10:51:26.603299 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:26.603320 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:26.603404 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:30.841466 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.238008699s)
	I0916 10:51:30.841504 1415006 kic.go:203] duration metric: took 4.238180437s to extract preloaded images to volume ...
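
Preloaded images ship as an lz4-compressed tarball that a throwaway container unpacks straight into the node's /var volume, so CRI-O starts with its image store already populated (hence "all images are preloaded" later in the log). Replaying the step the log records, as a hedged Go sketch:

package main

import "os/exec"

func main() {
	tarball := "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4"
	// tar runs inside a disposable container that mounts the tarball
	// read-only and the node volume at /extractDir, exactly as logged.
	cmd := exec.Command("docker", "run", "--rm",
		"--entrypoint", "/usr/bin/tar",
		"-v", tarball+":/preloaded.tar:ro",
		"-v", "ha-334765:/extractDir",
		"gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644",
		"-I", "lz4", "-xf", "/preloaded.tar", "-C", "/extractDir")
	if err := cmd.Run(); err != nil {
		panic(err)
	}
}
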
	W0916 10:51:30.841642 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:30.841800 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:30.897286 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765 --name ha-334765 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765 --network ha-334765 --ip 192.168.49.2 --volume ha-334765:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
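
Each `--publish=127.0.0.1::PORT` in the run command above leaves the host port empty, so Docker picks an ephemeral loopback port; that is why the SSH steps below first ask Docker which port was bound (34618 in this run). A sketch of that lookup using the same inspect template the log shows:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Template the log uses to find the host port mapped to 22/tcp.
	out, err := exec.Command("docker", "container", "inspect", "-f",
		`{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`,
		"ha-334765").Output()
	if err != nil {
		panic(err)
	}
	fmt.Println("ssh port:", strings.TrimSpace(string(out)))
}
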
	I0916 10:51:31.252747 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Running}}
	I0916 10:51:31.275416 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:31.296888 1415006 cli_runner.go:164] Run: docker exec ha-334765 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:31.359548 1415006 oci.go:144] the created container "ha-334765" has a running status.
	I0916 10:51:31.359577 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa...
	I0916 10:51:32.562429 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:32.562482 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:32.581119 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:32.597670 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:32.597695 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:32.646661 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:32.667372 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:32.667472 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:32.686050 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:32.686336 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:32.686346 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:32.825950 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:51:32.825982 1415006 ubuntu.go:169] provisioning hostname "ha-334765"
	I0916 10:51:32.826109 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:32.843087 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:32.843385 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:32.843403 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765 && echo "ha-334765" | sudo tee /etc/hostname
	I0916 10:51:32.992997 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:51:32.993079 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.015722 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:33.015997 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:33.016017 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:33.157078 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
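
The script just run is an idempotent /etc/hosts edit: if no entry already ends in the machine name, it rewrites an existing 127.0.1.1 line in place, otherwise appends one. A rough Go equivalent of the same logic (an illustration only):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func ensureHostname(hosts, name string) string {
	if regexp.MustCompile(`(?m)^.*\s` + regexp.QuoteMeta(name) + `$`).MatchString(hosts) {
		return hosts // name already resolvable
	}
	loopback := regexp.MustCompile(`(?m)^127\.0\.1\.1\s.*$`)
	if loopback.MatchString(hosts) {
		return loopback.ReplaceAllString(hosts, "127.0.1.1 "+name)
	}
	return strings.TrimRight(hosts, "\n") + "\n127.0.1.1 " + name + "\n"
}

func main() {
	fmt.Print(ensureHostname("127.0.0.1 localhost\n", "ha-334765"))
}
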
	I0916 10:51:33.157107 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:51:33.157127 1415006 ubuntu.go:177] setting up certificates
	I0916 10:51:33.157138 1415006 provision.go:84] configureAuth start
	I0916 10:51:33.157201 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:33.176122 1415006 provision.go:143] copyHostCerts
	I0916 10:51:33.176171 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:51:33.176210 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:51:33.176223 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:51:33.176305 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:51:33.176404 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:51:33.176430 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:51:33.176438 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:51:33.176469 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:51:33.176521 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:51:33.176541 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:51:33.176555 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:51:33.176586 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:51:33.176750 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765 san=[127.0.0.1 192.168.49.2 ha-334765 localhost minikube]
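
The server certificate minted here carries a SAN for every way the machine can be addressed: loopback, the static node IP, the machine name, localhost, and minikube. A hypothetical x509 template with the same SAN set (the field names are Go stdlib; the template itself is an assumption, not minikube's code):

package main

import (
	"crypto/x509"
	"crypto/x509/pkix"
	"math/big"
	"net"
	"time"
)

func serverCertTemplate() *x509.Certificate {
	return &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.ha-334765"}}, // org= in the log
		DNSNames:     []string{"ha-334765", "localhost", "minikube"},
		IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.49.2")},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour), // CertExpiration from the config dump
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
	}
}

func main() { _ = serverCertTemplate() }
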
	I0916 10:51:33.387204 1415006 provision.go:177] copyRemoteCerts
	I0916 10:51:33.387279 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:33.387325 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.404017 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:33.501800 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:33.501869 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1196 bytes)
	I0916 10:51:33.527570 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:33.527639 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:51:33.552418 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:33.552488 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:51:33.577987 1415006 provision.go:87] duration metric: took 420.824053ms to configureAuth
	I0916 10:51:33.578013 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:33.578211 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:33.578321 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.594979 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:33.595231 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34618 <nil> <nil>}
	I0916 10:51:33.595250 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:51:33.834727 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:51:33.834750 1415006 machine.go:96] duration metric: took 1.167359262s to provisionDockerMachine
	I0916 10:51:33.834761 1415006 client.go:171] duration metric: took 8.009325433s to LocalClient.Create
	I0916 10:51:33.834773 1415006 start.go:167] duration metric: took 8.009392033s to libmachine.API.Create "ha-334765"
	I0916 10:51:33.834781 1415006 start.go:293] postStartSetup for "ha-334765" (driver="docker")
	I0916 10:51:33.834792 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:33.834877 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:33.834923 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:33.854871 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:33.956078 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:51:33.959774 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:51:33.959810 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:51:33.959840 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:51:33.959852 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:51:33.959863 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:51:33.959942 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:51:33.960046 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:51:33.960058 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:51:33.960175 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:51:33.969263 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:51:33.994141 1415006 start.go:296] duration metric: took 159.34391ms for postStartSetup
	I0916 10:51:33.994530 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:34.017741 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:51:34.018093 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:51:34.018149 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.037405 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.129485 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:51:34.134543 1415006 start.go:128] duration metric: took 8.31270074s to createHost
	I0916 10:51:34.134567 1415006 start.go:83] releasing machines lock for "ha-334765", held for 8.312850874s
	I0916 10:51:34.134646 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:51:34.155746 1415006 ssh_runner.go:195] Run: cat /version.json
	I0916 10:51:34.155802 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.156041 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:51:34.156130 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:34.182433 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.183685 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:34.401806 1415006 ssh_runner.go:195] Run: systemctl --version
	I0916 10:51:34.406056 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:51:34.551646 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:51:34.556053 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:34.578073 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:51:34.578159 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:34.617437 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
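
Rather than deleting the stock CNI configs, the step parks them under a .mk_disabled suffix, which is reversible and is exactly what the find/mv above does to the podman and crio bridge configs, leaving only the CNI minikube installs active. A sketch of the same rename pass:

package main

import (
	"os"
	"path/filepath"
	"strings"
)

func main() {
	for _, pattern := range []string{"/etc/cni/net.d/*bridge*", "/etc/cni/net.d/*podman*"} {
		matches, _ := filepath.Glob(pattern)
		for _, f := range matches {
			if strings.HasSuffix(f, ".mk_disabled") {
				continue // already parked
			}
			_ = os.Rename(f, f+".mk_disabled")
		}
	}
}
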
	I0916 10:51:34.617461 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:51:34.617511 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:51:34.617570 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:51:34.635159 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:51:34.647920 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:51:34.648006 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:51:34.663065 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:51:34.678927 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:51:34.776926 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:51:34.882717 1415006 docker.go:233] disabling docker service ...
	I0916 10:51:34.882788 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:51:34.904617 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:51:34.918027 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:51:35.022662 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:51:35.132381 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:51:35.146125 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:51:35.165336 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:51:35.165438 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.176174 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:51:35.176279 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.186819 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.197810 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.207939 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:51:35.217303 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.227306 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:51:35.244795 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
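
The sed runs above rewrite /etc/crio/crio.conf.d/02-crio.conf in place: pin the pause image, switch the cgroup manager to cgroupfs (matching the driver detected on the host), reset conmon_cgroup to "pod", and seed default_sysctls so unprivileged ports start at 0. One of those rewrites, sketched with Go's regexp instead of sed:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	conf := `cgroup_manager = "systemd"` + "\n"
	// Equivalent of: sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|'
	re := regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`)
	fmt.Print(re.ReplaceAllString(conf, `cgroup_manager = "cgroupfs"`))
}
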
	I0916 10:51:35.255247 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:51:35.264668 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:51:35.273525 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:35.365962 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:51:35.482948 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:51:35.483024 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:51:35.487409 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:51:35.487488 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:51:35.490997 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:51:35.530436 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:51:35.530550 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:51:35.579393 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:51:35.621956 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:51:35.623893 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:35.639065 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:51:35.642843 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:35.654560 1415006 kubeadm.go:883] updating cluster {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:51:35.654692 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:51:35.654752 1415006 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:35.729057 1415006 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:51:35.729080 1415006 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:51:35.729137 1415006 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:35.765757 1415006 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:51:35.765780 1415006 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:51:35.765788 1415006 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:51:35.765907 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
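
The fragment above is a systemd drop-in: the bare `ExecStart=` clears the start command inherited from the base kubelet unit before the override replaces it, which is why the directive appears twice. The scp of 10-kubeadm.conf a few lines below writes this fragment out. A sketch of materializing it (path taken from the log):

package main

import "os"

func main() {
	dropIn := `[Unit]
Wants=crio.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2

[Install]
`
	if err := os.WriteFile("/etc/systemd/system/kubelet.service.d/10-kubeadm.conf", []byte(dropIn), 0o644); err != nil {
		panic(err)
	}
}
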
	I0916 10:51:35.766013 1415006 ssh_runner.go:195] Run: crio config
	I0916 10:51:35.818760 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:35.818782 1415006 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:35.818797 1415006 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:51:35.818828 1415006 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-334765 NodeName:ha-334765 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:51:35.818987 1415006 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "ha-334765"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
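
The manifest just printed stacks four YAML documents in one file: InitConfiguration, ClusterConfiguration, KubeletConfiguration, and KubeProxyConfiguration. Note the deliberately disarmed housekeeping for CI: imageGCHighThresholdPercent: 100 and the 0% evictionHard thresholds stop the kubelet from evicting on a busy disk, and failSwapOn: false matches the swap-limit warning earlier in the log. Splitting such a file back into its documents, as a sketch:

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	raw, err := os.ReadFile("/var/tmp/minikube/kubeadm.yaml")
	if err != nil {
		panic(err)
	}
	// kubeadm separates stacked documents with the standard "---" line.
	docs := strings.Split(string(raw), "\n---\n")
	fmt.Println(len(docs), "documents") // 4 for this config
}
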
	
	I0916 10:51:35.819024 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:51:35.819086 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:51:35.832413 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:51:35.832538 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
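
kube-vip runs as a static pod with NET_ADMIN/NET_RAW so it can ARP-announce the HA virtual IP on eth0; 192.168.49.254 is the APIServerHAVIP from the cluster config, leader election over the plndr-cp-lock lease decides which control-plane node holds it, and the lb_ settings enable the control-plane load balancing the log auto-detected above. A quick reachability probe for the VIP once the cluster is up (illustrative only):

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Endpoint kube-vip advertises for the API server.
	conn, err := net.DialTimeout("tcp", "192.168.49.254:8443", 2*time.Second)
	if err != nil {
		fmt.Println("VIP not reachable:", err)
		return
	}
	conn.Close()
	fmt.Println("VIP accepting connections")
}
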
	I0916 10:51:35.832614 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:51:35.842197 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:51:35.842293 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:51:35.851742 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (359 bytes)
	I0916 10:51:35.870725 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:51:35.890021 1415006 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2147 bytes)
	I0916 10:51:35.908444 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0916 10:51:35.927349 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:51:35.930875 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:35.942258 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:36.026713 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:51:36.042941 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.2
	I0916 10:51:36.043025 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:51:36.043060 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.043290 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:51:36.043394 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:51:36.043435 1415006 certs.go:256] generating profile certs ...
	I0916 10:51:36.043538 1415006 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:51:36.043607 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt with IP's: []
	I0916 10:51:36.860300 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt ...
	I0916 10:51:36.860339 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt: {Name:mkac681ee25aa1e7951b0d028bab38cc7560cf3c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.860557 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key ...
	I0916 10:51:36.860571 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key: {Name:mke38ba3a8fcfbd63628a4d07faa22aaaef77bf5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:36.860669 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb
	I0916 10:51:36.860707 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.254]
	I0916 10:51:37.048960 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb ...
	I0916 10:51:37.048992 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb: {Name:mkaafec7697140c2d12a2897ae395e3bc3762e0a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.049189 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb ...
	I0916 10:51:37.049204 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb: {Name:mk87074ff89a14f89698ca51659210ac44ba3c7c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.049290 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.46ae49fb -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:51:37.049379 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.46ae49fb -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
	I0916 10:51:37.049441 1415006 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:51:37.049460 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt with IP's: []
	I0916 10:51:37.224344 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt ...
	I0916 10:51:37.224377 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt: {Name:mkc239e75fbb7cdc72d962754cc320dca19a354f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.224588 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key ...
	I0916 10:51:37.224606 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key: {Name:mkec03ccf4bbffad63e69f8efaf7009f71a70043 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:37.224722 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:51:37.224746 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:51:37.224759 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:51:37.224776 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:51:37.224788 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:51:37.224806 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:51:37.224824 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:51:37.224837 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:51:37.224902 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:51:37.224948 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:51:37.224961 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:51:37.224995 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:51:37.225027 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:51:37.225056 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:51:37.225103 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:51:37.225139 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.225156 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.225170 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.225758 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:51:37.251857 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:51:37.277918 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:51:37.304245 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:51:37.330461 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:51:37.356181 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:51:37.381415 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:51:37.406811 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:51:37.432020 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:51:37.459768 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:51:37.485131 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:51:37.512047 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:51:37.530835 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:51:37.536447 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:51:37.546453 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.550332 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.550430 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:51:37.557447 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:51:37.567434 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:51:37.577308 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.581116 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.581185 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:37.588264 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:51:37.597960 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:51:37.607811 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.611428 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.611500 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:51:37.618516 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
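
The openssl/ln pairs above wire each CA into the trust store the way OpenSSL's lookup expects: a symlink named <subject-hash>.0 under /etc/ssl/certs pointing at the PEM (b5213941.0 for minikubeCA.pem in this run). One such link, sketched:

package main

import (
	"os"
	"os/exec"
	"strings"
)

func main() {
	pem := "/usr/share/ca-certificates/minikubeCA.pem"
	// `openssl x509 -hash` prints the subject hash OpenSSL resolves links by.
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
	if err != nil {
		panic(err)
	}
	link := "/etc/ssl/certs/" + strings.TrimSpace(string(out)) + ".0"
	_ = os.Symlink(pem, link) // ignore "already exists" on re-runs
}
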
	I0916 10:51:37.628255 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:51:37.631886 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:51:37.631942 1415006 kubeadm.go:392] StartCluster: {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:37.632030 1415006 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:51:37.632090 1415006 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:51:37.679540 1415006 cri.go:89] found id: ""
	I0916 10:51:37.679610 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:51:37.690855 1415006 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:51:37.701130 1415006 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:51:37.701246 1415006 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:51:37.714045 1415006 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:51:37.714077 1415006 kubeadm.go:157] found existing configuration files:
	
	I0916 10:51:37.714133 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:51:37.724303 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:51:37.724373 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:51:37.733985 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:51:37.743731 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:51:37.743822 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:51:37.752897 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:51:37.762961 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:51:37.763033 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:51:37.771597 1415006 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:51:37.782188 1415006 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:51:37.782276 1415006 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:51:37.791510 1415006 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
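
The long --ignore-preflight-errors list exists because, under the docker driver, kubeadm's host checks (swap, memory, bridge-nf-call-iptables, directories the kic base image pre-creates) would fail inside a container; the verification output that follows shows those checks being reported rather than enforced. Assembling such a command line, sketched with a subset of the logged flags:

package main

import (
	"fmt"
	"strings"
)

func main() {
	ignored := []string{ // subset of the flags in the log
		"DirAvailable--etc-kubernetes-manifests",
		"Port-10250", "Swap", "NumCPU", "Mem", "SystemVerification",
		"FileContent--proc-sys-net-bridge-bridge-nf-call-iptables",
	}
	cmd := "kubeadm init --config /var/tmp/minikube/kubeadm.yaml --ignore-preflight-errors=" +
		strings.Join(ignored, ",")
	fmt.Println(cmd)
}
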
	I0916 10:51:37.840159 1415006 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:51:37.840310 1415006 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:51:37.859474 1415006 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:51:37.859550 1415006 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:51:37.859589 1415006 kubeadm.go:310] OS: Linux
	I0916 10:51:37.859641 1415006 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:51:37.859694 1415006 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:51:37.859744 1415006 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:51:37.859794 1415006 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:51:37.859844 1415006 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:51:37.859895 1415006 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:51:37.859950 1415006 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:51:37.860003 1415006 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:51:37.860051 1415006 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:51:37.925874 1415006 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:51:37.926070 1415006 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:51:37.926217 1415006 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:51:37.937195 1415006 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:51:37.942368 1415006 out.go:235]   - Generating certificates and keys ...
	I0916 10:51:37.942551 1415006 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:51:37.942655 1415006 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:51:38.125701 1415006 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:51:38.357438 1415006 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:51:38.774277 1415006 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:51:39.506214 1415006 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:51:39.748178 1415006 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:51:39.748426 1415006 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-334765 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:40.295627 1415006 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:51:40.295763 1415006 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-334765 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:41.210520 1415006 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:51:41.539038 1415006 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:51:41.729080 1415006 kubeadm.go:310] [certs] Generating "sa" key and public key
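[Note: certificate generation is complete at this point. To confirm the SANs kubeadm recorded, e.g. that the etcd/server cert above really is signed for ha-334765, localhost, 192.168.49.2, 127.0.0.1, and ::1, one can inspect it inside the node. A sketch, assuming kubeadm's usual etcd/ subdirectory under the certificateDir shown above:

    # run inside the node (e.g. via `minikube ssh`)
    sudo openssl x509 -in /var/lib/minikube/certs/etcd/server.crt -noout -text \
      | grep -A1 'Subject Alternative Name'
]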
	I0916 10:51:41.729486 1415006 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:51:42.180776 1415006 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:51:42.580385 1415006 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:51:43.184585 1415006 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:51:43.602984 1415006 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:51:44.189060 1415006 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:51:44.189820 1415006 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:51:44.192872 1415006 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:51:44.195817 1415006 out.go:235]   - Booting up control plane ...
	I0916 10:51:44.195921 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:51:44.195997 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:51:44.196541 1415006 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:51:44.207462 1415006 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:51:44.213486 1415006 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:51:44.213756 1415006 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:51:44.308979 1415006 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:51:44.309098 1415006 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:51:45.817452 1415006 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.508735214s
	I0916 10:51:45.817543 1415006 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:51:54.973977 1415006 kubeadm.go:310] [api-check] The API server is healthy after 9.156530405s
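[Note: the two waits above poll plain HTTP(S) endpoints, so they can be reproduced by hand. An illustrative sketch using the addresses from the log (run inside the node); both should print "ok":

    curl -s  http://127.0.0.1:10248/healthz && echo      # kubelet health
    curl -sk https://192.168.49.2:8443/healthz && echo   # API server; -k because the CA is cluster-local
]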
	I0916 10:51:54.996383 1415006 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:51:55.042892 1415006 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:51:55.081931 1415006 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:51:55.082125 1415006 kubeadm.go:310] [mark-control-plane] Marking the node ha-334765 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:51:55.097789 1415006 kubeadm.go:310] [bootstrap-token] Using token: 718qfm.gar9p3a9mv3c0rdq
	I0916 10:51:55.100512 1415006 out.go:235]   - Configuring RBAC rules ...
	I0916 10:51:55.100662 1415006 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:51:55.113526 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:51:55.128064 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:51:55.132619 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:51:55.137079 1415006 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:51:55.142883 1415006 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:51:55.385469 1415006 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:51:55.841464 1415006 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:51:56.384537 1415006 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:51:56.386235 1415006 kubeadm.go:310] 
	I0916 10:51:56.386313 1415006 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:51:56.386323 1415006 kubeadm.go:310] 
	I0916 10:51:56.386399 1415006 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:51:56.386409 1415006 kubeadm.go:310] 
	I0916 10:51:56.386434 1415006 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:51:56.386496 1415006 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:51:56.386556 1415006 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:51:56.386567 1415006 kubeadm.go:310] 
	I0916 10:51:56.386620 1415006 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:51:56.386629 1415006 kubeadm.go:310] 
	I0916 10:51:56.386675 1415006 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:51:56.386683 1415006 kubeadm.go:310] 
	I0916 10:51:56.386735 1415006 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:51:56.386812 1415006 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:51:56.386883 1415006 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:51:56.386891 1415006 kubeadm.go:310] 
	I0916 10:51:56.386974 1415006 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:51:56.387052 1415006 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:51:56.387060 1415006 kubeadm.go:310] 
	I0916 10:51:56.387147 1415006 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 718qfm.gar9p3a9mv3c0rdq \
	I0916 10:51:56.387251 1415006 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 10:51:56.387276 1415006 kubeadm.go:310] 	--control-plane 
	I0916 10:51:56.387284 1415006 kubeadm.go:310] 
	I0916 10:51:56.387367 1415006 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:51:56.387375 1415006 kubeadm.go:310] 
	I0916 10:51:56.387456 1415006 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 718qfm.gar9p3a9mv3c0rdq \
	I0916 10:51:56.387558 1415006 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 10:51:56.392511 1415006 kubeadm.go:310] W0916 10:51:37.836530    1224 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:56.392831 1415006 kubeadm.go:310] W0916 10:51:37.837607    1224 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:56.393042 1415006 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:51:56.393147 1415006 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
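[Note: the two W-lines above flag that /var/tmp/minikube/kubeadm.yaml still uses the deprecated kubeadm.k8s.io/v1beta3 API; the remaining warnings (SystemVerification, Service-Kubelet) are expected inside a kic container. A sketch of the migration the warning itself suggests; the output filename here is an arbitrary placeholder:

    sudo kubeadm config migrate \
      --old-config /var/tmp/minikube/kubeadm.yaml \
      --new-config /var/tmp/minikube/kubeadm-migrated.yaml
]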
	I0916 10:51:56.393167 1415006 cni.go:84] Creating CNI manager for ""
	I0916 10:51:56.393178 1415006 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:56.396114 1415006 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:51:56.398793 1415006 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:51:56.402925 1415006 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:51:56.402950 1415006 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:51:56.422637 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
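[Note: minikube detected a multinode-capable layout, chose kindnet, and applied the CNI manifest with the node-local kubectl. A quick follow-up check, assuming minikube's kindnet DaemonSet keeps its usual name in kube-system (adjust if it differs):

    sudo /var/lib/minikube/binaries/v1.31.1/kubectl \
      --kubeconfig=/var/lib/minikube/kubeconfig \
      -n kube-system get ds kindnet
]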
	I0916 10:51:56.718869 1415006 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:51:56.719008 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:56.719095 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765 minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=true
	I0916 10:51:56.882861 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:56.882923 1415006 ops.go:34] apiserver oom_adj: -16
	I0916 10:51:57.382970 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:57.883653 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:58.383021 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:58.882914 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:59.112415 1415006 kubeadm.go:1113] duration metric: took 2.393450688s to wait for elevateKubeSystemPrivileges
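[Note: the repeated `get sa default` calls above are the elevateKubeSystemPrivileges wait: minikube polls until the default ServiceAccount exists after creating the minikube-rbac ClusterRoleBinding. A sketch for inspecting the binding, using the invocation style and names from the log:

    sudo /var/lib/minikube/binaries/v1.31.1/kubectl \
      --kubeconfig=/var/lib/minikube/kubeconfig \
      get clusterrolebinding minikube-rbac -o wide
]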
	I0916 10:51:59.112452 1415006 kubeadm.go:394] duration metric: took 21.480514885s to StartCluster
	I0916 10:51:59.112470 1415006 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:59.112550 1415006 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:59.113710 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:59.114020 1415006 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:51:59.114054 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:51:59.114066 1415006 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:51:59.114144 1415006 addons.go:69] Setting storage-provisioner=true in profile "ha-334765"
	I0916 10:51:59.114180 1415006 addons.go:234] Setting addon storage-provisioner=true in "ha-334765"
	I0916 10:51:59.114212 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:51:59.115033 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:51:59.115542 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:59.115785 1415006 addons.go:69] Setting default-storageclass=true in profile "ha-334765"
	I0916 10:51:59.115823 1415006 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-334765"
	I0916 10:51:59.116311 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.116930 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.169684 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:59.170166 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:51:59.170889 1415006 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:51:59.171320 1415006 addons.go:234] Setting addon default-storageclass=true in "ha-334765"
	I0916 10:51:59.171398 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:51:59.172115 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:51:59.180288 1415006 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:51:59.186707 1415006 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:59.186737 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:51:59.186871 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:59.195156 1415006 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:59.195179 1415006 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:51:59.195247 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:51:59.219386 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:59.244755 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:51:59.373862 1415006 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:59.379193 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
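[Note: the sed pipeline above rewrites the live coredns ConfigMap: it inserts a hosts block immediately above the `forward . /etc/resolv.conf` directive and a `log` directive above `errors`, then replaces the ConfigMap. Reconstructed from the sed expressions, the injected Corefile fragment is:

    hosts {
       192.168.49.1 host.minikube.internal
       fallthrough
    }
]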
	I0916 10:51:59.496553 1415006 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:59.727370 1415006 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:51:59.727403 1415006 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:51:59.727509 1415006 round_trippers.go:463] GET https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 10:51:59.727521 1415006 round_trippers.go:469] Request Headers:
	I0916 10:51:59.727530 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:59.727541 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:59.751265 1415006 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 10:51:59.751932 1415006 round_trippers.go:463] PUT https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:51:59.751957 1415006 round_trippers.go:469] Request Headers:
	I0916 10:51:59.751966 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:59.751973 1415006 round_trippers.go:473]     Content-Type: application/json
	I0916 10:51:59.751977 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:59.778964 1415006 round_trippers.go:574] Response Status: 200 OK in 26 milliseconds
	I0916 10:51:59.833987 1415006 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:52:00.418014 1415006 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 10:52:00.421055 1415006 addons.go:510] duration metric: took 1.30671994s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 10:52:00.421168 1415006 start.go:246] waiting for cluster config update ...
	I0916 10:52:00.421196 1415006 start.go:255] writing updated cluster config ...
	I0916 10:52:00.425091 1415006 out.go:201] 
	I0916 10:52:00.428130 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:00.428306 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:52:00.432343 1415006 out.go:177] * Starting "ha-334765-m02" control-plane node in "ha-334765" cluster
	I0916 10:52:00.440079 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:52:00.445821 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:52:00.448662 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:52:00.448823 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:52:00.448750 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:52:00.448981 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:52:00.449001 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:52:00.449175 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:52:00.479753 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:52:00.479779 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:52:00.479898 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:52:00.479921 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:52:00.479926 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:52:00.479938 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:52:00.479943 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:52:00.484731 1415006 image.go:273] response: 
	I0916 10:52:00.664934 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:52:00.664975 1415006 cache.go:194] Successfully downloaded all kic artifacts
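[Note: the W-line at 10:52:00.479753 means the kicbase image present in the local daemon was pulled for a different architecture, so minikube fell back to its cached tarball. A sketch for checking what the daemon holds, using the tag form of the image reference from the log (the log additionally pins a digest):

    docker image inspect \
      gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644 \
      --format '{{.Os}}/{{.Architecture}}'
]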
	I0916 10:52:00.665008 1415006 start.go:360] acquireMachinesLock for ha-334765-m02: {Name:mkb176e2cfa3ae927444127935258ba37ca2bc0a Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:52:00.665145 1415006 start.go:364] duration metric: took 112.153µs to acquireMachinesLock for "ha-334765-m02"
	I0916 10:52:00.665182 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:00.665284 1415006 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 10:52:00.670287 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:52:00.670431 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:52:00.670464 1415006 client.go:168] LocalClient.Create starting
	I0916 10:52:00.670549 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:52:00.670604 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:00.670627 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:00.670689 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:52:00.670711 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:00.670722 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:00.671002 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:00.688356 1415006 network_create.go:77] Found existing network {name:ha-334765 subnet:0x4001b0e3c0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:52:00.688402 1415006 kic.go:121] calculated static IP "192.168.49.3" for the "ha-334765-m02" container
	I0916 10:52:00.688484 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:52:00.707198 1415006 cli_runner.go:164] Run: docker volume create ha-334765-m02 --label name.minikube.sigs.k8s.io=ha-334765-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:52:00.728522 1415006 oci.go:103] Successfully created a docker volume ha-334765-m02
	I0916 10:52:00.728619 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m02 --entrypoint /usr/bin/test -v ha-334765-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:52:01.372104 1415006 oci.go:107] Successfully prepared a docker volume ha-334765-m02
	I0916 10:52:01.372154 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:52:01.372177 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:52:01.372250 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:52:05.551632 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.179343083s)
	I0916 10:52:05.551662 1415006 kic.go:203] duration metric: took 4.179483371s to extract preloaded images to volume ...
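[Note: the two docker run invocations above are minikube's volume-preload pattern: a throwaway container first verifies the named volume mounts at /var, then a tar container unpacks the lz4 preload into it. The same commands, reformatted for readability; PRELOAD_TARBALL is a placeholder for the long host path shown in the log, and the volume labels and digest pin are elided:

    docker volume create ha-334765-m02
    docker run --rm --entrypoint /usr/bin/tar \
      -v "$PRELOAD_TARBALL:/preloaded.tar:ro" \
      -v ha-334765-m02:/extractDir \
      gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644 \
      -I lz4 -xf /preloaded.tar -C /extractDir
]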
	W0916 10:52:05.551816 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:52:05.551922 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:52:05.604077 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765-m02 --name ha-334765-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765-m02 --network ha-334765 --ip 192.168.49.3 --volume ha-334765-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
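[Note: the node container above is created privileged, attached to the ha-334765 network at the static IP 192.168.49.3 calculated earlier, with its SSH, API, and Docker ports published to ephemeral loopback ports on the host; that is why SSH later targets 127.0.0.1:34623 rather than the container IP. A sketch for recovering the mapped SSH port, using the exact inspect template minikube itself runs below:

    docker container inspect ha-334765-m02 \
      --format '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'
]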
	I0916 10:52:05.935589 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Running}}
	I0916 10:52:05.956962 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:05.978082 1415006 cli_runner.go:164] Run: docker exec ha-334765-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:52:06.058726 1415006 oci.go:144] the created container "ha-334765-m02" has a running status.
	I0916 10:52:06.058757 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa...
	I0916 10:52:06.312828 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:52:06.312939 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:52:06.355435 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:06.377811 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:52:06.377836 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:52:06.445402 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:52:06.466570 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:52:06.466665 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:06.494178 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:06.494511 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:06.494529 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:52:06.495167 1415006 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:51890->127.0.0.1:34623: read: connection reset by peer
	I0916 10:52:09.636482 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:52:09.636510 1415006 ubuntu.go:169] provisioning hostname "ha-334765-m02"
	I0916 10:52:09.636580 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:09.656805 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:09.657052 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:09.657070 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m02 && echo "ha-334765-m02" | sudo tee /etc/hostname
	I0916 10:52:09.804617 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:52:09.804866 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:09.823409 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:09.823663 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:09.823686 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:52:09.960849 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:52:09.960874 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:52:09.960892 1415006 ubuntu.go:177] setting up certificates
	I0916 10:52:09.960902 1415006 provision.go:84] configureAuth start
	I0916 10:52:09.960969 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:09.978719 1415006 provision.go:143] copyHostCerts
	I0916 10:52:09.978772 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:52:09.978810 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:52:09.978820 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:52:09.978900 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:52:09.978988 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:52:09.979011 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:52:09.979022 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:52:09.979052 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:52:09.979108 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:52:09.979130 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:52:09.979138 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:52:09.979165 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:52:09.979220 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m02 san=[127.0.0.1 192.168.49.3 ha-334765-m02 localhost minikube]
	I0916 10:52:10.489529 1415006 provision.go:177] copyRemoteCerts
	I0916 10:52:10.489607 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:52:10.489649 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.506758 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:10.605930 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:52:10.605997 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:52:10.631509 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:52:10.631612 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:52:10.658840 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:52:10.658912 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:52:10.685131 1415006 provision.go:87] duration metric: took 724.214128ms to configureAuth
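[Note: configureAuth generated a server cert for SANs [127.0.0.1 192.168.49.3 ha-334765-m02 localhost minikube] and scp'd ca.pem, server.pem, and server-key.pem into /etc/docker on the node (1078, 1208, and 1679 bytes above). A quick illustrative check from inside ha-334765-m02:

    ls -l /etc/docker/ca.pem /etc/docker/server.pem /etc/docker/server-key.pem
]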
	I0916 10:52:10.685158 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:52:10.685357 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:10.685466 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.703073 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:10.703366 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34623 <nil> <nil>}
	I0916 10:52:10.703384 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:52:10.958686 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:52:10.958712 1415006 machine.go:96] duration metric: took 4.492118802s to provisionDockerMachine
	I0916 10:52:10.958733 1415006 client.go:171] duration metric: took 10.28824946s to LocalClient.Create
	I0916 10:52:10.958771 1415006 start.go:167] duration metric: took 10.288341355s to libmachine.API.Create "ha-334765"
	I0916 10:52:10.958783 1415006 start.go:293] postStartSetup for "ha-334765-m02" (driver="docker")
	I0916 10:52:10.958794 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:52:10.958860 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:52:10.958926 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:10.975981 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.075624 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:11.079601 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:11.079640 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:11.079651 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:11.079659 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:11.079677 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:52:11.079746 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:52:11.079827 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:52:11.079840 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:52:11.079941 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:11.090420 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:52:11.118562 1415006 start.go:296] duration metric: took 159.762972ms for postStartSetup
	I0916 10:52:11.118977 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:11.136980 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:52:11.137307 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:11.137363 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.158476 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.253634 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:11.258728 1415006 start.go:128] duration metric: took 10.593426369s to createHost
	I0916 10:52:11.258802 1415006 start.go:83] releasing machines lock for "ha-334765-m02", held for 10.593639804s
	I0916 10:52:11.258914 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:52:11.279190 1415006 out.go:177] * Found network options:
	I0916 10:52:11.281657 1415006 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:52:11.284231 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:11.284273 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:11.284347 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:52:11.284399 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.284724 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:11.284780 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:52:11.307462 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.314487 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34623 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:52:11.587011 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:11.592179 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:11.617718 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:11.617855 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:11.657673 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:52:11.657699 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:52:11.657763 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:11.657834 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:52:11.676285 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:52:11.689619 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:11.689740 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:11.706308 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:11.725075 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:11.820645 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:11.926313 1415006 docker.go:233] disabling docker service ...
	I0916 10:52:11.926394 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:11.949345 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:11.962262 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:12.060126 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:12.163880 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
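[Note: the runs above neutralize the competing runtimes so CRI-O owns the node: stop the socket and service, disable the socket, and mask the service so nothing can re-activate it. A condensed sketch of the same pattern for one unit pair:

    sudo systemctl stop -f cri-docker.socket cri-docker.service
    sudo systemctl disable cri-docker.socket
    sudo systemctl mask cri-docker.service
]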
	I0916 10:52:12.177575 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:12.197810 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:52:12.197937 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.208512 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:52:12.208708 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.219913 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.230153 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.240821 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:12.250534 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.260743 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.277289 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:52:12.287823 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:12.296715 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:12.306946 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:12.399434 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
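[Note: taken together, the sed edits above amount to the following fragment of /etc/crio/crio.conf.d/02-crio.conf before crio is restarted. Reconstructed from the sed expressions; exact ordering and the file's other settings are not shown in the log:

    pause_image = "registry.k8s.io/pause:3.10"
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
      "net.ipv4.ip_unprivileged_port_start=0",
    ]
]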
	I0916 10:52:12.524967 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:52:12.525125 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:52:12.529140 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:52:12.529213 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:52:12.532859 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:12.574886 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:52:12.574997 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:52:12.614823 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:52:12.657597 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:52:12.660197 1415006 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:12.662736 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:12.678392 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:12.682057 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
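[Note: the one-liner above refreshes the host.minikube.internal mapping in /etc/hosts: drop any stale entry, append the current one, stage the result in a temp file, and copy it back with sudo. The same logic unpacked (printf is used here to make the literal tab explicit):

    {
      grep -v $'\thost.minikube.internal$' /etc/hosts      # drop any stale mapping
      printf '192.168.49.1\thost.minikube.internal\n'      # append the current one
    } > /tmp/h.$$
    sudo cp /tmp/h.$$ /etc/hosts
]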
	I0916 10:52:12.694028 1415006 mustload.go:65] Loading cluster: ha-334765
	I0916 10:52:12.694263 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:12.694518 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:52:12.710565 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:52:12.710858 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.3
	I0916 10:52:12.710873 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:52:12.710970 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:12.711140 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:52:12.711191 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:52:12.711203 1415006 certs.go:256] generating profile certs ...
	I0916 10:52:12.711281 1415006 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:52:12.711314 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632
	I0916 10:52:12.711333 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 10:52:13.104970 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 ...
	I0916 10:52:13.105002 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632: {Name:mk332993607c190de4cef2cfaffaf260af064109 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:13.105202 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632 ...
	I0916 10:52:13.105218 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632: {Name:mk50822762cb3f23eab85fa836ddc46c7035cd54 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:13.105303 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.5b1cf632 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:52:13.105444 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
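The apiserver certificate generated above carries every IP a client might use to reach the control plane as a subject alternative name: the in-cluster service VIP 10.96.0.1, localhost, both node IPs, and the kube-vip address 192.168.49.254. A self-contained Go sketch of signing a serving cert with IP SANs against a CA (simplified relative to minikube's crypto.go; error handling elided for brevity, and the throwaway CA here stands in for minikubeCA):

    package main

    import (
    	"crypto/rand"
    	"crypto/rsa"
    	"crypto/x509"
    	"crypto/x509/pkix"
    	"encoding/pem"
    	"math/big"
    	"net"
    	"os"
    	"time"
    )

    func main() {
    	// Throwaway CA standing in for minikubeCA.
    	caKey, _ := rsa.GenerateKey(rand.Reader, 2048)
    	caTmpl := &x509.Certificate{
    		SerialNumber:          big.NewInt(1),
    		Subject:               pkix.Name{CommonName: "minikubeCA"},
    		NotBefore:             time.Now(),
    		NotAfter:              time.Now().Add(24 * time.Hour),
    		IsCA:                  true,
    		KeyUsage:              x509.KeyUsageCertSign,
    		BasicConstraintsValid: true,
    	}
    	caDER, _ := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
    	caCert, _ := x509.ParseCertificate(caDER)

    	// Serving cert whose IP SANs cover every address clients may dial.
    	srvKey, _ := rsa.GenerateKey(rand.Reader, 2048)
    	srvTmpl := &x509.Certificate{
    		SerialNumber: big.NewInt(2),
    		Subject:      pkix.Name{CommonName: "minikube"},
    		NotBefore:    time.Now(),
    		NotAfter:     time.Now().Add(24 * time.Hour),
    		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
    		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
    		IPAddresses: []net.IP{
    			net.ParseIP("10.96.0.1"), net.ParseIP("127.0.0.1"), net.ParseIP("10.0.0.1"),
    			net.ParseIP("192.168.49.2"), net.ParseIP("192.168.49.3"), net.ParseIP("192.168.49.254"),
    		},
    	}
    	der, _ := x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey)
    	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
    }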
	I0916 10:52:13.105587 1415006 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:52:13.105605 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:13.105621 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:13.105637 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:13.105654 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:13.105668 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:13.105688 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:13.105698 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:13.105711 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:13.105761 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:52:13.105795 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:13.105808 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:13.105832 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:52:13.105857 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:13.105881 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:52:13.105926 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:52:13.105959 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.105973 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.105986 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.106049 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:52:13.123479 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:52:13.213053 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:13.216592 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:13.228773 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:13.232197 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:52:13.244494 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:13.247967 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:13.260282 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:13.263770 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:52:13.276165 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:13.279651 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:13.291769 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:13.295289 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:52:13.308129 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:13.333370 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:52:13.359716 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:13.384764 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:52:13.409876 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0916 10:52:13.434260 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:52:13.458675 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:13.488573 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:52:13.514770 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:52:13.540012 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:52:13.566866 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:13.594872 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:13.614472 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:52:13.633347 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:13.654026 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:52:13.672630 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:13.691149 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 10:52:13.711987 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:13.731237 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:52:13.736872 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:52:13.746441 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.750286 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.750353 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:52:13.757939 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:13.767433 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:13.776739 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.780254 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.780320 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:13.787147 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:13.796665 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:52:13.806165 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.809849 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.809948 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:52:13.818466 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
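The `openssl x509 -hash` calls above compute the subject-name hash that OpenSSL uses to look up trusted CAs in /etc/ssl/certs: each certificate must be reachable through a <hash>.0 symlink (b5213941.0 for minikubeCA.pem in this run). A sketch reproducing the same link step by shelling out to the openssl binary, just as the logged commands do (linkCACert is a hypothetical helper):

    package main

    import (
    	"fmt"
    	"os"
    	"os/exec"
    	"path/filepath"
    	"strings"
    )

    // linkCACert creates the /etc/ssl/certs/<subject-hash>.0 symlink that
    // OpenSSL's lookup-by-hash machinery expects for certPath.
    func linkCACert(certPath string) error {
    	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
    	if err != nil {
    		return err
    	}
    	hash := strings.TrimSpace(string(out))
    	link := filepath.Join("/etc/ssl/certs", hash+".0")
    	os.Remove(link) // replace a stale link if present (ln -fs behaviour)
    	return os.Symlink(certPath, link)
    }

    func main() {
    	if err := linkCACert("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    	}
    }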
	I0916 10:52:13.828214 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:13.832140 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:13.832240 1415006 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 crio true true} ...
	I0916 10:52:13.832362 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:13.832410 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:13.832465 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:13.845768 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:13.845888 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
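The static pod above runs kube-vip on the host network of every control-plane node: the instances compete for the plndr-cp-lock lease, and the current leader answers ARP for 192.168.49.254 and, with lb_enable set, load-balances port 8443 across the apiservers. A quick reachability probe of that virtual endpoint, as a sketch (address and port taken from the config above):

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"net"
    	"time"
    )

    func main() {
    	// Dial the kube-vip virtual IP; InsecureSkipVerify because we only
    	// care whether *something* terminates TLS there, not who it is.
    	conn, err := tls.DialWithDialer(
    		&net.Dialer{Timeout: 3 * time.Second},
    		"tcp", "192.168.49.254:8443",
    		&tls.Config{InsecureSkipVerify: true},
    	)
    	if err != nil {
    		fmt.Println("VIP not answering yet:", err)
    		return
    	}
    	defer conn.Close()
    	fmt.Println("VIP is serving TLS:", conn.RemoteAddr())
    }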
	I0916 10:52:13.845981 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:13.854977 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:13.855079 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:13.863878 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:52:13.883201 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:13.902444 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:13.922631 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:13.926287 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:13.937523 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:14.028531 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:14.042898 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:52:14.043261 1415006 start.go:317] joinCluster: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:14.043396 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:14.043473 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:52:14.062096 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:52:14.228472 1415006 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:14.228566 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 8hrwpz.mj79utjeence4u1y --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443"
	I0916 10:52:22.726920 1415006 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 8hrwpz.mj79utjeence4u1y --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443": (8.498319708s)
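The --discovery-token-ca-cert-hash flag in the join command pins the cluster CA: kubeadm on the joining node compares it against the SHA-256 of the CA certificate's Subject Public Key Info. A sketch of recomputing that hash from ca.crt (compare with the sha256:a39d4a6e… value logged above):

    package main

    import (
    	"crypto/sha256"
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    )

    func main() {
    	pemBytes, err := os.ReadFile("/var/lib/minikube/certs/ca.crt")
    	if err != nil {
    		panic(err)
    	}
    	block, _ := pem.Decode(pemBytes)
    	if block == nil {
    		panic("no PEM block in ca.crt")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		panic(err)
    	}
    	// kubeadm hashes the DER-encoded SubjectPublicKeyInfo of the CA key,
    	// which x509 keeps verbatim in RawSubjectPublicKeyInfo.
    	fmt.Printf("sha256:%x\n", sha256.Sum256(cert.RawSubjectPublicKeyInfo))
    }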
	I0916 10:52:22.726953 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:23.147534 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765-m02 minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=false
	I0916 10:52:23.305867 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-334765-m02 node-role.kubernetes.io/control-plane:NoSchedule-
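The trailing "-" on node-role.kubernetes.io/control-plane:NoSchedule removes that taint, so this control-plane node also schedules ordinary workloads (it is listed with ControlPlane:true Worker:true). A client-go sketch of the same taint removal, assuming a kubeconfig at the default path rather than the in-VM /var/lib/minikube/kubeconfig used above:

    package main

    import (
    	"context"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	node, err := cs.CoreV1().Nodes().Get(context.TODO(), "ha-334765-m02", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	// Keep every taint except control-plane:NoSchedule.
    	var kept []corev1.Taint
    	for _, t := range node.Spec.Taints {
    		if t.Key == "node-role.kubernetes.io/control-plane" && t.Effect == corev1.TaintEffectNoSchedule {
    			continue
    		}
    		kept = append(kept, t)
    	}
    	node.Spec.Taints = kept
    	if _, err := cs.CoreV1().Nodes().Update(context.TODO(), node, metav1.UpdateOptions{}); err != nil {
    		panic(err)
    	}
    }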
	I0916 10:52:23.515255 1415006 start.go:319] duration metric: took 9.471990207s to joinCluster
	I0916 10:52:23.515311 1415006 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:52:23.515715 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:52:23.518211 1415006 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:23.521068 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:23.709941 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:23.740820 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:52:23.741096 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:52:23.741154 1415006 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:52:23.741371 1415006 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m02" to be "Ready" ...
	I0916 10:52:23.741457 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:23.741463 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:23.741471 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.741475 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.781124 1415006 round_trippers.go:574] Response Status: 200 OK in 39 milliseconds
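Note the kubeadm.go:483 override above: the kubeconfig points at the kube-vip VIP (192.168.49.254:8443), but while the cluster is still converging minikube talks to the primary apiserver at 192.168.49.2:8443 directly. The GETs that follow poll /api/v1/nodes/ha-334765-m02 roughly every 500ms until the NodeReady condition turns True; a client-go sketch of the same wait loop (nodeReady is a hypothetical helper):

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // nodeReady reports whether the node's Ready condition is True.
    func nodeReady(node *corev1.Node) bool {
    	for _, c := range node.Status.Conditions {
    		if c.Type == corev1.NodeReady {
    			return c.Status == corev1.ConditionTrue
    		}
    	}
    	return false
    }

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	deadline := time.Now().Add(6 * time.Minute) // matches "waiting up to 6m0s"
    	for time.Now().Before(deadline) {
    		node, err := cs.CoreV1().Nodes().Get(context.TODO(), "ha-334765-m02", metav1.GetOptions{})
    		if err == nil && nodeReady(node) {
    			fmt.Println("node is Ready")
    			return
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	fmt.Println("timed out waiting for node to become Ready")
    }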
	I0916 10:52:24.241976 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:24.241996 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:24.242006 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.242010 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.245900 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:24.741943 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:24.741965 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:24.741974 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.741979 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.749099 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:25.241610 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:25.241631 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:25.241640 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.241644 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.249860 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:52:25.742514 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:25.742535 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:25.742544 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.742548 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.753199 1415006 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:52:25.754210 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:26.242467 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:26.242488 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:26.242498 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.242502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.245197 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:26.741733 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:26.741753 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:26.741763 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.741768 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.746424 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:27.241954 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:27.241977 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:27.241987 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.241992 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.244752 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.741709 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:27.741735 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:27.741746 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.741751 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.744984 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.241770 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:28.241795 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:28.241805 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.241811 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.244551 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:28.245309 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:28.741681 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:28.741710 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:28.741720 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.741724 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.744578 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.242147 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:29.242172 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:29.242182 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.242187 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.245114 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.742566 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:29.742593 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:29.742608 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.742615 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.761729 1415006 round_trippers.go:574] Response Status: 200 OK in 19 milliseconds
	I0916 10:52:30.242409 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:30.242439 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:30.242449 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.242454 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.250146 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:30.252106 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:30.741944 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:30.741966 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:30.741975 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.741981 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.745078 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.242008 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:31.242033 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:31.242044 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.242048 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.245015 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.742091 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:31.742113 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:31.742124 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.742129 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.745699 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.241705 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:32.241728 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:32.241736 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.241740 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.244364 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.741603 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:32.741629 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:32.741640 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.741645 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.744539 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.745432 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:33.241887 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:33.241913 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:33.241924 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.241929 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.244745 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:33.742125 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:33.742151 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:33.742161 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.742166 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.745404 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:34.241644 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:34.241667 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:34.241685 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:34.241690 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:34.244563 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:34.742175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:34.742199 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:34.742209 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:34.742216 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:34.745722 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:34.746318 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:35.242600 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:35.242623 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:35.242642 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:35.242646 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:35.245475 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:35.741735 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:35.741761 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:35.741770 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:35.741774 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:35.744768 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:36.242263 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:36.242286 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:36.242296 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:36.242301 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:36.245814 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:36.741563 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:36.741588 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:36.741598 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:36.741602 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:36.744548 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:37.242185 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:37.242212 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:37.242222 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:37.242227 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:37.245048 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:37.245765 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:37.742194 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:37.742226 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:37.742237 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:37.742242 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:37.745736 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:38.242165 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:38.242192 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:38.242201 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:38.242206 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:38.245009 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:38.742540 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:38.742572 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:38.742581 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:38.742586 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:38.745597 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:39.242203 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:39.242228 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:39.242238 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:39.242243 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:39.245253 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:39.246012 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:39.741671 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:39.741695 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:39.741705 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:39.741709 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:39.744942 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:40.242598 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:40.242624 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:40.242635 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:40.242639 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:40.250216 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:40.741940 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:40.741961 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:40.741972 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:40.741976 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:40.756155 1415006 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:52:41.241625 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:41.241647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:41.241657 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:41.241662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:41.245894 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:41.247063 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:41.742510 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:41.742545 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:41.742556 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:41.742560 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:41.745490 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:42.242360 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:42.242390 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:42.242400 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:42.242406 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:42.246532 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:42.742541 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:42.742563 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:42.742572 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:42.742576 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:42.745391 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:43.241682 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:43.241707 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:43.241739 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:43.241745 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:43.244745 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:43.741996 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:43.742022 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:43.742032 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:43.742039 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:43.745752 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:43.746562 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:44.242297 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:44.242323 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:44.242334 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:44.242338 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:44.245186 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:44.742276 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:44.742305 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:44.742319 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:44.742327 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:44.747590 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:45.241653 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:45.241683 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:45.241691 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:45.241696 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:45.244961 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:45.742638 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:45.742709 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:45.742733 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:45.742753 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:45.745915 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:46.242019 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:46.242051 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:46.242061 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:46.242068 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:46.245110 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:46.246183 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:46.742450 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:46.742475 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:46.742484 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:46.742487 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:46.745682 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:47.241933 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:47.241954 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:47.241965 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:47.241969 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:47.245806 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:47.741951 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:47.741979 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:47.741987 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:47.741992 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:47.745513 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:48.241606 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:48.241627 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:48.241637 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:48.241641 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:48.244434 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:48.742220 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:48.742246 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:48.742256 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:48.742263 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:48.745643 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:48.746444 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:49.242145 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:49.242167 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:49.242188 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:49.242193 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:49.245054 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:49.742183 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:49.742208 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:49.742218 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:49.742225 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:49.745773 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:50.242281 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:50.242307 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:50.242316 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:50.242323 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:50.245234 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:50.742303 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:50.742328 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:50.742339 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:50.742343 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:50.745257 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:51.242595 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:51.242620 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:51.242630 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:51.242634 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:51.245376 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:51.246283 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:51.742591 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:51.742616 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:51.742625 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:51.742630 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:51.745485 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:52.241886 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:52.241907 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:52.241916 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:52.241920 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:52.247220 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:52.742341 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:52.742363 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:52.742372 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:52.742377 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:52.745917 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:53.241555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:53.241580 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:53.241591 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:53.241597 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:53.244293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:53.741926 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:53.741947 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:53.741957 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:53.741962 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:53.744959 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:53.745473 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:54.242061 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:54.242086 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:54.242097 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:54.242103 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:54.244841 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:54.742033 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:54.742063 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:54.742073 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:54.742077 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:54.745079 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:55.242356 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:55.242378 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:55.242388 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:55.242393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:55.245278 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:55.741833 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:55.741860 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:55.741870 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:55.741876 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:55.744609 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:56.241780 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:56.241808 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:56.241818 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:56.241823 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:56.244615 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:56.245318 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:56.741743 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:56.741766 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:56.741776 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:56.741780 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:56.744633 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:57.241613 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:57.241636 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:57.241646 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:57.241652 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:57.244216 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:57.742036 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:57.742062 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:57.742072 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:57.742077 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:57.746530 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:58.241978 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:58.242006 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:58.242021 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:58.242027 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:58.244775 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:58.741646 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:58.741667 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:58.741677 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:58.741682 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:58.745084 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:58.745902 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:52:59.242241 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:59.242268 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:59.242278 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:59.242282 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:59.245123 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:59.741914 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:52:59.741938 1415006 round_trippers.go:469] Request Headers:
	I0916 10:52:59.741947 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:59.741952 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:59.744805 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:00.241767 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:00.241795 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:00.241808 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.241818 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.246348 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.741743 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:00.741765 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:00.741775 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.741779 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.749230 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:53:00.750039 1415006 node_ready.go:53] node "ha-334765-m02" has status "Ready":"False"
	I0916 10:53:01.241929 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:01.241955 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:01.241966 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.241971 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.244762 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:01.741929 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:01.741956 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:01.741965 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.741968 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.745371 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.242363 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:02.242389 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.242400 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.242404 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.245485 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.246489 1415006 node_ready.go:49] node "ha-334765-m02" has status "Ready":"True"
	I0916 10:53:02.246513 1415006 node_ready.go:38] duration metric: took 38.505119639s for node "ha-334765-m02" to be "Ready" ...
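
The ~500ms cadence above is minikube's node_ready loop: repeatedly GET the Node object via client-go and inspect its Ready condition until it reports True. A minimal sketch of that check, assuming an already-built clientset (the helper name is illustrative, not minikube's own):

    // waitNodeReady polls a node until its Ready condition is True.
    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    func waitNodeReady(cs *kubernetes.Clientset, name string, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            node, err := cs.CoreV1().Nodes().Get(context.TODO(), name, metav1.GetOptions{})
            if err != nil {
                return err
            }
            for _, c := range node.Status.Conditions {
                if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
                    return nil
                }
            }
            time.Sleep(500 * time.Millisecond) // matches the polling interval visible in the log
        }
        return fmt.Errorf("node %q never became Ready within %v", name, timeout)
    }
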
	I0916 10:53:02.246524 1415006 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:02.246667 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:02.246679 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.246688 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.246693 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.251351 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:02.268461 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.268572 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:53:02.268585 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.268595 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.268602 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.272439 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.273315 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.273339 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.273349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.273354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.275959 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.276538 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.276558 1415006 pod_ready.go:82] duration metric: took 8.057814ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.276569 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.276638 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:53:02.276649 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.276657 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.276661 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.279530 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.280211 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.280229 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.280238 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.280242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.283048 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.283574 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.283596 1415006 pod_ready.go:82] duration metric: took 7.016289ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.283607 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.283675 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:53:02.283687 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.283695 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.283701 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.286265 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.286865 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.286896 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.286906 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.286911 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.289188 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.290118 1415006 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.290142 1415006 pod_ready.go:82] duration metric: took 6.523247ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.290153 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.290253 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:53:02.290262 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.290270 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.290274 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.292803 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.293555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:02.293576 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.293585 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.293588 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.296047 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.296705 1415006 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.296754 1415006 pod_ready.go:82] duration metric: took 6.57187ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.296779 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.443144 1415006 request.go:632] Waited for 146.287429ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:53:02.443230 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:53:02.443240 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.443249 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.443262 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.446555 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.642805 1415006 request.go:632] Waited for 195.379106ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.642916 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:02.642930 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.642952 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.642970 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.646665 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.647425 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:02.647451 1415006 pod_ready.go:82] duration metric: took 350.660949ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
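
The "Waited ... due to client-side throttling, not priority and fairness" lines are produced by client-go's local token-bucket rate limiter (defaults QPS=5, Burst=10), not by the API server's Priority and Fairness machinery: the burst of per-pod GETs simply exceeds the client's own budget. A sketch of where those knobs live, with illustrative values:

    package main

    import (
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // newFastClient raises client-go's default rate limits, which are
    // what pace the back-to-back GETs seen in this log.
    func newFastClient(kubeconfig string) (*kubernetes.Clientset, error) {
        cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
        if err != nil {
            return nil, err
        }
        cfg.QPS = 50    // illustrative; default is 5
        cfg.Burst = 100 // illustrative; default is 10
        return kubernetes.NewForConfig(cfg)
    }
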
	I0916 10:53:02.647465 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:02.842366 1415006 request.go:632] Waited for 194.827087ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:53:02.842458 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:53:02.842468 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:02.842477 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.842482 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.845485 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.042509 1415006 request.go:632] Waited for 196.214326ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.042622 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.042634 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.042644 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.042652 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.045759 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.046402 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.046486 1415006 pod_ready.go:82] duration metric: took 399.011706ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.046515 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.242890 1415006 request.go:632] Waited for 196.300429ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:53:03.242965 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:53:03.242973 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.242981 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.242985 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.246378 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.442460 1415006 request.go:632] Waited for 195.246531ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:03.442537 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:03.442548 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.442559 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.442574 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.445445 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.446203 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.446231 1415006 pod_ready.go:82] duration metric: took 399.70594ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.446244 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.642600 1415006 request.go:632] Waited for 196.265943ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:53:03.642664 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:53:03.642670 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.642679 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.642687 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.645564 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.843031 1415006 request.go:632] Waited for 196.277414ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.843103 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:03.843109 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:03.843118 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.843122 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.846031 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.846685 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:03.846705 1415006 pod_ready.go:82] duration metric: took 400.453524ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:03.846717 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.043206 1415006 request.go:632] Waited for 196.413574ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:53:04.043312 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:53:04.043345 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.043361 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.043366 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.046273 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.242448 1415006 request.go:632] Waited for 195.253449ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:04.242555 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:04.242568 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.242589 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.242602 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.245443 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.246596 1415006 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:04.246635 1415006 pod_ready.go:82] duration metric: took 399.906535ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.246683 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.443057 1415006 request.go:632] Waited for 196.287465ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:53:04.443134 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:53:04.443146 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.443153 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.443157 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.446204 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.643359 1415006 request.go:632] Waited for 196.339574ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:04.643433 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:04.643443 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.643452 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.643464 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.646461 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.647237 1415006 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:04.647261 1415006 pod_ready.go:82] duration metric: took 400.564553ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.647274 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:04.843076 1415006 request.go:632] Waited for 195.706318ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:53:04.843175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:53:04.843185 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:04.843195 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.843212 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.846214 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.043290 1415006 request.go:632] Waited for 196.31747ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:05.043357 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:53:05.043364 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.043373 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.043383 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.046606 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.047589 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:05.047661 1415006 pod_ready.go:82] duration metric: took 400.344866ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.047689 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.242459 1415006 request.go:632] Waited for 194.67199ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:53:05.242544 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:53:05.242555 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.242571 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.242577 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.245337 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.442971 1415006 request.go:632] Waited for 196.967989ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:05.443047 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:53:05.443053 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.443063 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.443067 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.445902 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.446757 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:05.446777 1415006 pod_ready.go:82] duration metric: took 399.067697ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:05.446790 1415006 pod_ready.go:39] duration metric: took 3.200220972s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
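
Each pod_ready check above is two GETs: one for the pod (to read its Ready condition) and one for the node it is scheduled on. The condition check itself reduces to a few lines of client-go; a sketch, with an illustrative helper name:

    package main

    import (
        "context"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // podIsReady fetches a pod and reports whether its Ready condition is True.
    func podIsReady(cs *kubernetes.Clientset, ns, name string) (bool, error) {
        pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
        if err != nil {
            return false, err
        }
        for _, c := range pod.Status.Conditions {
            if c.Type == corev1.PodReady {
                return c.Status == corev1.ConditionTrue, nil
            }
        }
        return false, nil
    }
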
	I0916 10:53:05.446806 1415006 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:53:05.446880 1415006 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:53:05.458901 1415006 api_server.go:72] duration metric: took 41.943544799s to wait for apiserver process to appear ...
	I0916 10:53:05.458967 1415006 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:53:05.459005 1415006 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:53:05.467138 1415006 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:53:05.467221 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:53:05.467232 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.467242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.467248 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.468101 1415006 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:53:05.468214 1415006 api_server.go:141] control plane version: v1.31.1
	I0916 10:53:05.468228 1415006 api_server.go:131] duration metric: took 9.240118ms to wait for apiserver health ...
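
The healthz probe is a raw GET against /healthz rather than a typed API call; on success the endpoint answers with the literal body "ok", as logged above. A sketch of the same probe through client-go's REST client:

    package main

    import (
        "context"
        "fmt"

        "k8s.io/client-go/kubernetes"
    )

    // apiserverHealthy issues GET /healthz and expects the body "ok".
    func apiserverHealthy(cs *kubernetes.Clientset) error {
        body, err := cs.Discovery().RESTClient().Get().AbsPath("/healthz").DoRaw(context.TODO())
        if err != nil {
            return err
        }
        if string(body) != "ok" {
            return fmt.Errorf("unexpected healthz body: %s", body)
        }
        return nil
    }
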
	I0916 10:53:05.468236 1415006 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:53:05.642462 1415006 request.go:632] Waited for 174.157713ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:05.642633 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:05.642647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.642655 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.642660 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.647406 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:05.653624 1415006 system_pods.go:59] 17 kube-system pods found
	I0916 10:53:05.653662 1415006 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:53:05.653668 1415006 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:53:05.653673 1415006 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:53:05.653677 1415006 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:53:05.653682 1415006 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:53:05.653686 1415006 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:53:05.653690 1415006 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:53:05.653694 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:53:05.653698 1415006 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:53:05.653703 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:53:05.653706 1415006 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:53:05.653711 1415006 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:53:05.653722 1415006 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:53:05.653725 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:53:05.653730 1415006 system_pods.go:61] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:53:05.653735 1415006 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:53:05.653739 1415006 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:53:05.653751 1415006 system_pods.go:74] duration metric: took 185.50876ms to wait for pod list to return data ...
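
Unlike the per-pod readiness loop, the 17-pod dump above comes from a single List call over the kube-system namespace. The equivalent, as a sketch:

    package main

    import (
        "context"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // runningSystemPods lists kube-system and keeps the pods in phase Running.
    func runningSystemPods(cs *kubernetes.Clientset) ([]string, error) {
        pods, err := cs.CoreV1().Pods("kube-system").List(context.TODO(), metav1.ListOptions{})
        if err != nil {
            return nil, err
        }
        var names []string
        for _, p := range pods.Items {
            if p.Status.Phase == corev1.PodRunning {
                names = append(names, p.Name)
            }
        }
        return names, nil
    }
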
	I0916 10:53:05.653760 1415006 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:53:05.843013 1415006 request.go:632] Waited for 189.171469ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:05.843072 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:05.843111 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:05.843124 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.843128 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.846194 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.846512 1415006 default_sa.go:45] found service account: "default"
	I0916 10:53:05.846554 1415006 default_sa.go:55] duration metric: took 192.782373ms for default service account to be created ...
	I0916 10:53:05.846579 1415006 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:53:06.042639 1415006 request.go:632] Waited for 195.976491ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:06.042754 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:06.042771 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:06.042810 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.042816 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.047399 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:06.053539 1415006 system_pods.go:86] 17 kube-system pods found
	I0916 10:53:06.053625 1415006 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:53:06.053663 1415006 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:53:06.053691 1415006 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:53:06.053716 1415006 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:53:06.053750 1415006 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:53:06.053773 1415006 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:53:06.053795 1415006 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:53:06.053831 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:53:06.053857 1415006 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:53:06.053877 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:53:06.053914 1415006 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:53:06.053941 1415006 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:53:06.053961 1415006 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:53:06.053998 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:53:06.054022 1415006 system_pods.go:89] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:53:06.054041 1415006 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:53:06.054075 1415006 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:53:06.054100 1415006 system_pods.go:126] duration metric: took 207.502425ms to wait for k8s-apps to be running ...
	I0916 10:53:06.054122 1415006 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:53:06.054223 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:53:06.069303 1415006 system_svc.go:56] duration metric: took 15.171233ms WaitForService to wait for kubelet
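
The kubelet check relies on systemctl's exit code: `is-active --quiet` prints nothing and exits 0 only when the unit is active, so the error result of the command is the whole answer. A local sketch with os/exec (minikube runs the same command over SSH via ssh_runner):

    package main

    import "os/exec"

    // kubeletActive returns true when systemd reports the kubelet unit active.
    func kubeletActive() bool {
        return exec.Command("sudo", "systemctl", "is-active", "--quiet", "kubelet").Run() == nil
    }
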
	I0916 10:53:06.069331 1415006 kubeadm.go:582] duration metric: took 42.55399515s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:53:06.069351 1415006 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:53:06.242778 1415006 request.go:632] Waited for 173.332043ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:06.242857 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:06.242867 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:06.242876 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.242882 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.246168 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:06.247202 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:06.247235 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:06.247246 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:06.247251 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:06.247256 1415006 node_conditions.go:105] duration metric: took 177.90001ms to run NodePressure ...
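
The NodePressure step reads capacity straight off each Node's status; the 203034800Ki and 2-CPU figures above are those fields for both nodes. A sketch of the same read:

    package main

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // printNodeCapacity prints ephemeral-storage and CPU capacity per node.
    func printNodeCapacity(cs *kubernetes.Clientset) error {
        nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
        if err != nil {
            return err
        }
        for _, n := range nodes.Items {
            storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
            cpu := n.Status.Capacity[corev1.ResourceCPU]
            fmt.Printf("%s: storage=%s cpu=%s\n", n.Name, storage.String(), cpu.String())
        }
        return nil
    }
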
	I0916 10:53:06.247268 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:53:06.247300 1415006 start.go:255] writing updated cluster config ...
	I0916 10:53:06.249092 1415006 out.go:201] 
	I0916 10:53:06.250653 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:06.250796 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:53:06.252540 1415006 out.go:177] * Starting "ha-334765-m03" control-plane node in "ha-334765" cluster
	I0916 10:53:06.254018 1415006 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:53:06.255307 1415006 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:53:06.256474 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:53:06.256499 1415006 cache.go:56] Caching tarball of preloaded images
	I0916 10:53:06.256560 1415006 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:53:06.256644 1415006 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:53:06.256656 1415006 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:53:06.256871 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:53:06.275827 1415006 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:53:06.275854 1415006 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:53:06.275935 1415006 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:53:06.275957 1415006 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:53:06.275965 1415006 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:53:06.275973 1415006 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:53:06.275981 1415006 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:53:06.278155 1415006 image.go:273] response: 
	I0916 10:53:06.474580 1415006 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:53:06.474619 1415006 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:53:06.474653 1415006 start.go:360] acquireMachinesLock for ha-334765-m03: {Name:mkfee903f3f5d2ff3d5e015b57c571ebdaa535f2 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:53:06.474774 1415006 start.go:364] duration metric: took 99.583µs to acquireMachinesLock for "ha-334765-m03"
	I0916 10:53:06.474806 1415006 start.go:93] Provisioning new machine with config: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:06.474936 1415006 start.go:125] createHost starting for "m03" (driver="docker")
	I0916 10:53:06.476575 1415006 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:53:06.476719 1415006 start.go:159] libmachine.API.Create for "ha-334765" (driver="docker")
	I0916 10:53:06.476783 1415006 client.go:168] LocalClient.Create starting
	I0916 10:53:06.476872 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 10:53:06.476909 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:53:06.476931 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:53:06.476987 1415006 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 10:53:06.477009 1415006 main.go:141] libmachine: Decoding PEM data...
	I0916 10:53:06.477024 1415006 main.go:141] libmachine: Parsing certificate...
	I0916 10:53:06.477270 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:53:06.493092 1415006 network_create.go:77] Found existing network {name:ha-334765 subnet:0x400180ced0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:53:06.493133 1415006 kic.go:121] calculated static IP "192.168.49.4" for the "ha-334765-m03" container
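
From the log the static IPs look strictly sequential within the cluster network: gateway .1, first control plane .2, m02 .3, so m03 lands on .4. A sketch of that arithmetic with net/netip, assuming the sequential scheme holds (the helper is hypothetical, not minikube's kic code):

    package main

    import (
        "fmt"
        "net/netip"
    )

    // nthHost returns the nth usable address after the network address,
    // so node 1 gets .2, node 2 gets .3, node 3 gets .4.
    func nthHost(prefix netip.Prefix, n int) netip.Addr {
        addr := prefix.Addr() // network address, e.g. 192.168.49.0
        for i := 0; i <= n; i++ {
            addr = addr.Next()
        }
        return addr
    }

    func main() {
        p := netip.MustParsePrefix("192.168.49.0/24")
        fmt.Println(nthHost(p, 3)) // 192.168.49.4 for the third node (m03)
    }
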
	I0916 10:53:06.493213 1415006 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:53:06.507809 1415006 cli_runner.go:164] Run: docker volume create ha-334765-m03 --label name.minikube.sigs.k8s.io=ha-334765-m03 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:53:06.524398 1415006 oci.go:103] Successfully created a docker volume ha-334765-m03
	I0916 10:53:06.524488 1415006 cli_runner.go:164] Run: docker run --rm --name ha-334765-m03-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m03 --entrypoint /usr/bin/test -v ha-334765-m03:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:53:07.123195 1415006 oci.go:107] Successfully prepared a docker volume ha-334765-m03
	I0916 10:53:07.123241 1415006 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:53:07.123262 1415006 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:53:07.123339 1415006 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:53:11.291684 1415006 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v ha-334765-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.168299443s)
	I0916 10:53:11.291718 1415006 kic.go:203] duration metric: took 4.168452038s to extract preloaded images to volume ...
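
The 4.2s step above is the preload being untarred into the node's named volume by a throwaway container: the lz4 tarball is bind-mounted read-only and tar runs as the entrypoint. A sketch of the same shell-out, mirroring the docker run arguments from the log:

    package main

    import (
        "fmt"
        "os/exec"
    )

    // extractPreload untars a preloaded-images tarball into a docker volume.
    func extractPreload(tarball, volume, image string) error {
        cmd := exec.Command("docker", "run", "--rm",
            "--entrypoint", "/usr/bin/tar",
            "-v", tarball+":/preloaded.tar:ro",
            "-v", volume+":/extractDir",
            image, "-I", "lz4", "-xf", "/preloaded.tar", "-C", "/extractDir")
        if out, err := cmd.CombinedOutput(); err != nil {
            return fmt.Errorf("extract failed: %v: %s", err, out)
        }
        return nil
    }
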
	W0916 10:53:11.291863 1415006 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:53:11.291988 1415006 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:53:11.362214 1415006 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-334765-m03 --name ha-334765-m03 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-334765-m03 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-334765-m03 --network ha-334765 --ip 192.168.49.4 --volume ha-334765-m03:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:53:11.713441 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Running}}
	I0916 10:53:11.735379 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:11.764201 1415006 cli_runner.go:164] Run: docker exec ha-334765-m03 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:53:11.835446 1415006 oci.go:144] the created container "ha-334765-m03" has a running status.
	I0916 10:53:11.835475 1415006 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa...
	I0916 10:53:12.082378 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:53:12.082457 1415006 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:53:12.107481 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:12.143696 1415006 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:53:12.143723 1415006 kic_runner.go:114] Args: [docker exec --privileged ha-334765-m03 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:53:12.261270 1415006 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:53:12.291299 1415006 machine.go:93] provisionDockerMachine start ...
	I0916 10:53:12.291406 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:12.331691 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:12.331980 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:12.331996 1415006 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:53:12.332609 1415006 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:34576->127.0.0.1:34628: read: connection reset by peer
	I0916 10:53:15.472981 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
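
The "connection reset by peer" at 10:53:12 is expected: sshd inside the just-started container is not listening yet, and the dial succeeds three seconds later on retry. A generic sketch of that retry loop, assuming golang.org/x/crypto/ssh (not minikube's exact implementation):

    package main

    import (
        "time"

        "golang.org/x/crypto/ssh"
    )

    // dialWithRetry redials until sshd accepts the handshake or the deadline passes.
    func dialWithRetry(addr string, cfg *ssh.ClientConfig, timeout time.Duration) (*ssh.Client, error) {
        deadline := time.Now().Add(timeout)
        for {
            client, err := ssh.Dial("tcp", addr, cfg)
            if err == nil {
                return client, nil
            }
            if time.Now().After(deadline) {
                return nil, err
            }
            time.Sleep(time.Second)
        }
    }
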
	
	I0916 10:53:15.473007 1415006 ubuntu.go:169] provisioning hostname "ha-334765-m03"
	I0916 10:53:15.473076 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:15.494704 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:15.495028 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:15.495067 1415006 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m03 && echo "ha-334765-m03" | sudo tee /etc/hostname
	I0916 10:53:15.650225 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
	
	I0916 10:53:15.650316 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:15.672331 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:15.672616 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:15.672637 1415006 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:53:15.814223 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:53:15.814255 1415006 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:53:15.814272 1415006 ubuntu.go:177] setting up certificates
	I0916 10:53:15.814281 1415006 provision.go:84] configureAuth start
	I0916 10:53:15.814347 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:15.833647 1415006 provision.go:143] copyHostCerts
	I0916 10:53:15.833696 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:53:15.833730 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:53:15.833741 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:53:15.833820 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:53:15.833906 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:53:15.833927 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:53:15.833932 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:53:15.833963 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:53:15.834012 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:53:15.834032 1415006 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:53:15.834039 1415006 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:53:15.834064 1415006 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:53:15.834129 1415006 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m03 san=[127.0.0.1 192.168.49.4 ha-334765-m03 localhost minikube]
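
Editor's note: configureAuth generates a per-node server certificate whose SAN list is exactly what the log line above prints — two IPs and three DNS names. A sketch of producing a certificate with that SAN set using Go's crypto/x509; it is self-signed here for brevity, whereas minikube signs with ca.pem/ca-key.pem, so treat this as an illustration of the SAN mechanics, not minikube's implementation:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.ha-334765-m03"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour), // matches CertExpiration in the profile config
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		// The SAN list from the log line above.
		DNSNames:    []string{"ha-334765-m03", "localhost", "minikube"},
		IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.49.4")},
	}
	// Self-signed (template == parent); minikube signs with its CA instead.
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}
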
	I0916 10:53:16.104183 1415006 provision.go:177] copyRemoteCerts
	I0916 10:53:16.104255 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:53:16.104300 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.123646 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.222535 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:53:16.222611 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:53:16.251274 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:53:16.251343 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:53:16.278542 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:53:16.278611 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:53:16.306453 1415006 provision.go:87] duration metric: took 492.157045ms to configureAuth
	I0916 10:53:16.306481 1415006 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:53:16.306725 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:16.306838 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.324544 1415006 main.go:141] libmachine: Using SSH client type: native
	I0916 10:53:16.324981 1415006 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34628 <nil> <nil>}
	I0916 10:53:16.325005 1415006 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:53:16.627483 1415006 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
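Editor's note: the step above drops a one-line sysconfig file marking the service CIDR 10.96.0.0/12 as an insecure registry range, then restarts CRI-O so the flag takes effect. A sketch of the same write-then-restart pattern run locally (minikube performs it over SSH; this must run as root on a systemd host with CRI-O installed):

package main

import (
	"os"
	"os/exec"
)

func main() {
	// Content taken verbatim from the SSH command in the log above.
	opts := "CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '\n"
	if err := os.MkdirAll("/etc/sysconfig", 0o755); err != nil {
		panic(err)
	}
	if err := os.WriteFile("/etc/sysconfig/crio.minikube", []byte(opts), 0o644); err != nil {
		panic(err)
	}
	// CRI-O only picks the new option up on restart.
	if out, err := exec.Command("systemctl", "restart", "crio").CombinedOutput(); err != nil {
		panic(string(out))
	}
}
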
	I0916 10:53:16.627505 1415006 machine.go:96] duration metric: took 4.336184696s to provisionDockerMachine
	I0916 10:53:16.627515 1415006 client.go:171] duration metric: took 10.150719198s to LocalClient.Create
	I0916 10:53:16.627528 1415006 start.go:167] duration metric: took 10.150810715s to libmachine.API.Create "ha-334765"
	I0916 10:53:16.627535 1415006 start.go:293] postStartSetup for "ha-334765-m03" (driver="docker")
	I0916 10:53:16.627546 1415006 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:53:16.627615 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:53:16.627656 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.654164 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.758791 1415006 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:53:16.762707 1415006 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:53:16.762743 1415006 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:53:16.762754 1415006 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:53:16.762761 1415006 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:53:16.762772 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:53:16.762840 1415006 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:53:16.762923 1415006 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:53:16.762936 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:53:16.763037 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:53:16.772014 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:53:16.801813 1415006 start.go:296] duration metric: took 174.260965ms for postStartSetup
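
Editor's note: postStartSetup's filesync scan mirrors everything under .minikube/files into the node, preserving relative paths — which is how files/etc/ssl/certs/13838332.pem lands at /etc/ssl/certs/13838332.pem above. A local sketch of that mirroring (paths in main are hypothetical; the real copy goes over SSH):

package main

import (
	"io"
	"io/fs"
	"os"
	"path/filepath"
)

// mirrorTree copies every regular file under src into dst, preserving the
// relative layout - the same idea as the filesync scan in the log.
func mirrorTree(src, dst string) error {
	return filepath.WalkDir(src, func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return err
		}
		rel, err := filepath.Rel(src, path)
		if err != nil {
			return err
		}
		target := filepath.Join(dst, rel)
		if err := os.MkdirAll(filepath.Dir(target), 0o755); err != nil {
			return err
		}
		in, err := os.Open(path)
		if err != nil {
			return err
		}
		defer in.Close()
		out, err := os.Create(target)
		if err != nil {
			return err
		}
		defer out.Close()
		_, err = io.Copy(out, in)
		return err
	})
}

func main() {
	if err := mirrorTree(".minikube/files", "/tmp/node-root"); err != nil {
		panic(err)
	}
}
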
	I0916 10:53:16.802211 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:16.821029 1415006 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:53:16.821355 1415006 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:53:16.821396 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.839877 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.934101 1415006 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:53:16.939654 1415006 start.go:128] duration metric: took 10.464702951s to createHost
	I0916 10:53:16.939677 1415006 start.go:83] releasing machines lock for "ha-334765-m03", held for 10.464890261s
	I0916 10:53:16.939761 1415006 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:53:16.965319 1415006 out.go:177] * Found network options:
	I0916 10:53:16.968311 1415006 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:53:16.970944 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.970983 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.971012 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:53:16.971033 1415006 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:53:16.971117 1415006 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:53:16.971163 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.971452 1415006 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:53:16.971510 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:53:16.989731 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:16.994076 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:53:17.260397 1415006 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:53:17.265904 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:53:17.288372 1415006 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:53:17.288452 1415006 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:53:17.373761 1415006 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:53:17.373782 1415006 start.go:495] detecting cgroup driver to use...
	I0916 10:53:17.373815 1415006 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:53:17.373866 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:53:17.400994 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:53:17.415230 1415006 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:53:17.415320 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:53:17.432013 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:53:17.449884 1415006 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:53:17.567007 1415006 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:53:17.678362 1415006 docker.go:233] disabling docker service ...
	I0916 10:53:17.678486 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:53:17.714391 1415006 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:53:17.728114 1415006 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:53:17.828765 1415006 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:53:17.931898 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:53:17.944776 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:53:17.962967 1415006 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:53:17.963074 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.974753 1415006 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:53:17.974862 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.987324 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:17.999085 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.015477 1415006 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:53:18.026175 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.039368 1415006 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.063804 1415006 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:53:18.074316 1415006 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:53:18.084063 1415006 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:53:18.093683 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:18.187365 1415006 ssh_runner.go:195] Run: sudo systemctl restart crio
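
Editor's note: the sed sequence above rewrites /etc/crio/crio.conf.d/02-crio.conf in place — pause image, cgroupfs as cgroup manager, conmon_cgroup=pod, and the unprivileged-port sysctl — before daemon-reload and a CRI-O restart. A Go equivalent of the first two rewrites, as a sketch of the pattern (only two of the edits are reproduced; run as root against the same path):

package main

import (
	"os"
	"regexp"
)

func main() {
	const conf = "/etc/crio/crio.conf.d/02-crio.conf" // path from the log
	data, err := os.ReadFile(conf)
	if err != nil {
		panic(err)
	}
	// Equivalent of: sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|'
	data = regexp.MustCompile(`(?m)^.*pause_image = .*$`).
		ReplaceAll(data, []byte(`pause_image = "registry.k8s.io/pause:3.10"`))
	// Equivalent of: sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|'
	data = regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`).
		ReplaceAll(data, []byte(`cgroup_manager = "cgroupfs"`))
	if err := os.WriteFile(conf, data, 0o644); err != nil {
		panic(err)
	}
}
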
	I0916 10:53:18.309318 1415006 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:53:18.309407 1415006 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:53:18.314492 1415006 start.go:563] Will wait 60s for crictl version
	I0916 10:53:18.314584 1415006 ssh_runner.go:195] Run: which crictl
	I0916 10:53:18.318911 1415006 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:53:18.363698 1415006 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:53:18.363788 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:53:18.404796 1415006 ssh_runner.go:195] Run: crio --version
	I0916 10:53:18.451943 1415006 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:53:18.455026 1415006 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:53:18.457816 1415006 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:53:18.460419 1415006 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:53:18.476580 1415006 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:53:18.480547 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:53:18.491447 1415006 mustload.go:65] Loading cluster: ha-334765
	I0916 10:53:18.491704 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:18.491965 1415006 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:53:18.508964 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:53:18.509259 1415006 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.4
	I0916 10:53:18.509275 1415006 certs.go:194] generating shared ca certs ...
	I0916 10:53:18.509289 1415006 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:18.509407 1415006 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:53:18.509455 1415006 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:53:18.509475 1415006 certs.go:256] generating profile certs ...
	I0916 10:53:18.509554 1415006 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:53:18.509582 1415006 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7
	I0916 10:53:18.509602 1415006 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:53:19.047943 1415006 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 ...
	I0916 10:53:19.047976 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7: {Name:mk3f67d3b8ad284ed3fcbe0ef0f2362b0ca1a10f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:19.048216 1415006 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7 ...
	I0916 10:53:19.048232 1415006 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7: {Name:mka9b6e6481e41e152d6f053685783f8774b731a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:53:19.048319 1415006 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.ce2d4ce7 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:53:19.048466 1415006 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
	I0916 10:53:19.048628 1415006 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:53:19.048647 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:53:19.048664 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:53:19.048702 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:53:19.048719 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:53:19.048734 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:53:19.048746 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:53:19.048760 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:53:19.048776 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:53:19.048832 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:53:19.048865 1415006 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:53:19.048877 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:53:19.048941 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:53:19.048973 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:53:19.048998 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:53:19.049041 1415006 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:53:19.049074 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.049095 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.049110 1415006 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.049167 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:53:19.067596 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:53:19.165164 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:53:19.170272 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:53:19.184795 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:53:19.188445 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:53:19.201392 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:53:19.205315 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:53:19.218566 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:53:19.222202 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:53:19.234664 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:53:19.238432 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:53:19.250860 1415006 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:53:19.254569 1415006 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:53:19.267856 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:53:19.293781 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:53:19.325208 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:53:19.355506 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:53:19.382558 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0916 10:53:19.411948 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:53:19.439207 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:53:19.472121 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:53:19.503271 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:53:19.528254 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:53:19.556903 1415006 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:53:19.589671 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:53:19.610404 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:53:19.631169 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:53:19.650809 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:53:19.670985 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:53:19.691580 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 10:53:19.712009 1415006 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:53:19.733794 1415006 ssh_runner.go:195] Run: openssl version
	I0916 10:53:19.741003 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:53:19.752608 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.756420 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.756518 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:53:19.763995 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:53:19.773757 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:53:19.783253 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.786986 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.787055 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:53:19.794466 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:53:19.804322 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:53:19.814048 1415006 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.818016 1415006 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.818130 1415006 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:53:19.826289 1415006 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
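
Editor's note: each CA dropped into /usr/share/ca-certificates is exposed to OpenSSL consumers through a <subject-hash>.0 symlink in /etc/ssl/certs; the hash names in the log (3ec20f2e.0, b5213941.0, 51391683.0) come from `openssl x509 -hash`. A sketch of that link step in Go — it shells out to openssl, so it assumes the binary is installed, and minikube runs the equivalent shell over SSH rather than this code:

package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// linkBySubjectHash reproduces the test -L || ln -fs step: compute the
// OpenSSL subject hash of certPath and symlink <hash>.0 in /etc/ssl/certs.
func linkBySubjectHash(certPath string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
	if err != nil {
		return err
	}
	link := fmt.Sprintf("/etc/ssl/certs/%s.0", strings.TrimSpace(string(out)))
	if _, err := os.Lstat(link); err == nil {
		return nil // link (or file) already present
	}
	return os.Symlink(certPath, link)
}

func main() {
	if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
		panic(err)
	}
}
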
	I0916 10:53:19.836101 1415006 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:53:19.840415 1415006 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:53:19.840472 1415006 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 crio true true} ...
	I0916 10:53:19.840565 1415006 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
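
Editor's note: the kubelet [Service] drop-in above is rendered per node — only the binary path version, --hostname-override, and --node-ip vary between m02 and m03. A text/template sketch in the same spirit; the template text here is abbreviated for illustration and is not minikube's actual template:

package main

import (
	"os"
	"text/template"
)

const unit = `[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.KubernetesVersion}}/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override={{.NodeName}} --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip={{.NodeIP}}
`

func main() {
	// Values for the third control-plane node, taken from the log above.
	data := struct{ KubernetesVersion, NodeName, NodeIP string }{
		KubernetesVersion: "v1.31.1",
		NodeName:          "ha-334765-m03",
		NodeIP:            "192.168.49.4",
	}
	tmpl := template.Must(template.New("kubelet").Parse(unit))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
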
	I0916 10:53:19.840598 1415006 kube-vip.go:115] generating kube-vip config ...
	I0916 10:53:19.840650 1415006 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:53:19.855502 1415006 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:53:19.855572 1415006 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
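
Editor's note: kube-vip runs as a static pod — the manifest above is written to /etc/kubernetes/manifests/kube-vip.yaml (the 1441-byte scp further down), and the kubelet instantiates it directly, with no API server involved, which is exactly what a control-plane VIP manager needs during bootstrap. Leader election for the VIP uses the plndr-cp-lock lease with the 5s lease / 3s renew / 1s retry cadence set in the env block. A minimal sketch of the placement step (the source filename is illustrative):

package main

import "os"

func main() {
	manifest, err := os.ReadFile("kube-vip.yaml") // the rendered config above
	if err != nil {
		panic(err)
	}
	// The kubelet watches this directory and runs whatever pod manifests it
	// finds there as static pods, independent of the API server.
	if err := os.WriteFile("/etc/kubernetes/manifests/kube-vip.yaml", manifest, 0o644); err != nil {
		panic(err)
	}
}
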
	I0916 10:53:19.855644 1415006 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:53:19.865367 1415006 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:53:19.865511 1415006 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:53:19.875152 1415006 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:53:19.894518 1415006 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:53:19.914639 1415006 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:53:19.933891 1415006 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:53:19.937701 1415006 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:53:19.949318 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:20.046913 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:20.072657 1415006 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:53:20.073165 1415006 start.go:317] joinCluster: &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:53:20.073363 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:53:20.073461 1415006 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:53:20.100714 1415006 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:53:20.274860 1415006 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:20.274906 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ksxyw6.2d1ymdt7tddb4km5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443"
	I0916 10:53:30.640812 1415006 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ksxyw6.2d1ymdt7tddb4km5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=ha-334765-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443": (10.365883142s)
	I0916 10:53:30.640843 1415006 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:53:31.125803 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-334765-m03 minikube.k8s.io/updated_at=2024_09_16T10_53_31_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-334765 minikube.k8s.io/primary=false
	I0916 10:53:31.396293 1415006 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-334765-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:53:31.598835 1415006 start.go:319] duration metric: took 11.525668168s to joinCluster
	I0916 10:53:31.598894 1415006 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:53:31.599454 1415006 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:53:31.604275 1415006 out.go:177] * Verifying Kubernetes components...
	I0916 10:53:31.606980 1415006 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:53:31.797463 1415006 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:31.833344 1415006 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:53:31.833687 1415006 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:53:31.833751 1415006 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:53:31.834041 1415006 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m03" to be "Ready" ...
	I0916 10:53:31.834123 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:31.834129 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:31.834137 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:31.834141 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:31.837376 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
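
Editor's note: the round-tripper traffic from here on is a poll loop — GET the node object every 500ms and stop once its Ready condition turns True; the periodic node_ready.go:53 lines mark checkpoints where it is still False. A hedged client-go equivalent of that wait (the kubeconfig path is illustrative; the 500ms interval and 6m0s timeout are the values stated in the log):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/.kube/config") // illustrative path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Poll every 500ms for up to 6 minutes, mirroring the wait in the log.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			node, err := cs.CoreV1().Nodes().Get(ctx, "ha-334765-m03", metav1.GetOptions{})
			if err != nil {
				return false, nil // transient API error: keep polling
			}
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
	if err != nil {
		panic(err)
	}
	fmt.Println("node Ready")
}
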
	I0916 10:53:32.334572 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:32.334647 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:32.334670 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:32.334689 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:32.337666 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:32.834705 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:32.834724 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:32.834734 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:32.834738 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:32.837580 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:33.334634 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:33.334707 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:33.334731 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:33.334751 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:33.338340 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:33.834288 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:33.834357 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:33.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:33.834397 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:33.837256 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:33.838513 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:34.334672 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:34.334696 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:34.334707 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:34.334711 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:34.337733 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:34.834338 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:34.834361 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:34.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:34.834402 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:34.838423 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:35.334337 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:35.334361 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:35.334370 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:35.334374 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:35.337532 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:35.834533 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:35.834556 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:35.834567 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:35.834572 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:35.837777 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:36.334925 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:36.334962 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:36.334979 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:36.334984 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:36.338421 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:36.339093 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:36.834319 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:36.834345 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:36.834355 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:36.834359 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:36.837761 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:37.335214 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:37.335242 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:37.335251 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:37.335257 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:37.338207 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:37.834909 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:37.834927 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:37.834937 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:37.834942 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:37.839069 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:38.334529 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:38.334552 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:38.334562 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:38.334566 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:38.337585 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:38.835032 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:38.835058 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:38.835068 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:38.835072 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:38.838726 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:38.839539 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:39.335389 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:39.335414 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:39.335424 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:39.335430 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:39.338809 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:39.835229 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:39.835254 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:39.835264 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:39.835268 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:39.839005 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:40.335096 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:40.335121 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:40.335131 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:40.335137 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:40.338609 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:40.835192 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:40.835217 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:40.835227 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:40.835231 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:40.838036 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:41.334291 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:41.334317 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:41.334327 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:41.334330 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:41.337742 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:41.338409 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:41.834696 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:41.834721 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:41.834731 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:41.834736 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:41.837505 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:42.334987 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:42.335014 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:42.335024 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:42.335035 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:42.338292 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:42.834327 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:42.834350 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:42.834362 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:42.834370 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:42.837711 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:43.335129 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:43.335154 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:43.335164 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:43.335169 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:43.338064 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:43.338799 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:43.834278 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:43.834303 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:43.834311 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:43.834315 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:43.837366 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:44.334790 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:44.334813 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:44.334831 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:44.334836 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:44.340073 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:44.834381 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:44.834404 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:44.834412 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:44.834416 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:44.837293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:45.334302 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:45.334337 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:45.334348 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:45.334356 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:45.338454 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:45.339100 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:45.835035 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:45.835074 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:45.835108 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:45.835114 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:45.838226 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:46.334329 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:46.334356 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:46.334365 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:46.334369 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:46.337611 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:46.835198 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:46.835227 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:46.835236 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:46.835243 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:46.839134 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:47.334400 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:47.334427 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:47.334436 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:47.334443 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:47.337505 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:47.834710 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:47.834735 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:47.834745 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:47.834749 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:47.837659 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:47.838212 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:48.335021 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:48.335043 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:48.335052 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:48.335057 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:48.337824 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:48.834207 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:48.834230 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:48.834240 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:48.834244 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:48.837316 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.334248 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:49.334275 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:49.334285 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:49.334291 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:49.338068 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.834749 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:49.834771 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:49.834781 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:49.834784 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:49.837961 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:49.838637 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:50.334263 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:50.334288 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:50.334298 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:50.334303 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:50.337326 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:50.834342 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:50.834369 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:50.834380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:50.834384 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:50.837734 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.334695 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:51.334733 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:51.334743 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:51.334748 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:51.338065 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.834867 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:51.834892 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:51.834902 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:51.834908 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:51.838148 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:51.838877 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:52.334577 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:52.334601 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:52.334611 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:52.334615 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:52.337642 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:52.835278 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:52.835306 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:52.835316 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:52.835321 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:52.838258 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:53.334951 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:53.334976 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:53.334992 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:53.334997 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:53.338675 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:53.834618 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:53.834648 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:53.834658 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:53.834662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:53.838336 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:53.839444 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:54.334672 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:54.334709 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:54.334720 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:54.334724 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:54.339608 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:54.835193 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:54.835228 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:54.835242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:54.835247 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:54.838653 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:55.335250 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:55.335277 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:55.335287 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:55.335291 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:55.338230 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:55.834762 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:55.834784 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:55.834797 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:55.834801 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:55.838056 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:56.334285 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:56.334308 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:56.334318 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:56.334323 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:56.337775 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:56.338362 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:56.834647 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:56.834671 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:56.834681 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:56.834685 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:56.837569 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:57.334657 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:57.334678 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:57.334691 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:57.334694 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:57.343098 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:53:57.835006 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:57.835033 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:57.835042 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:57.835049 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:57.837720 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:58.334429 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:58.334456 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:58.334465 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:58.334472 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:58.337511 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:58.834605 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:58.834628 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:58.834637 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:58.834642 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:58.837378 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:58.837903 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:53:59.334925 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:59.334951 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:59.334962 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:59.334968 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:59.338253 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:59.834904 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:53:59.834929 1415006 round_trippers.go:469] Request Headers:
	I0916 10:53:59.834939 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:59.834945 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:59.838010 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:00.335143 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:00.335205 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:00.335217 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:00.335221 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:00.345031 1415006 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:54:00.834926 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:00.834952 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:00.834962 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:00.834966 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:00.838074 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:00.839282 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:01.334380 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:01.334408 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:01.334417 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:01.334424 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:01.339710 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:54:01.834219 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:01.834247 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:01.834264 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:01.834270 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:01.837969 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:02.334202 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:02.334232 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:02.334242 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:02.334247 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:02.337619 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:02.834905 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:02.834928 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:02.834938 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:02.834943 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:02.845883 1415006 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:54:02.847840 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:03.334290 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:03.334312 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:03.334322 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:03.334326 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:03.337176 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:03.834315 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:03.834339 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:03.834349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:03.834354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:03.837206 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:04.334616 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:04.334641 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:04.334651 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:04.334656 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:04.337620 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:04.834866 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:04.834896 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:04.834906 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:04.834910 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:04.837695 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:05.335209 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:05.335237 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:05.335247 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:05.335251 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:05.338244 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:05.339099 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:05.835148 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:05.835172 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:05.835181 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:05.835188 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:05.838744 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:06.334296 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:06.334319 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:06.334329 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:06.334335 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:06.337643 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:06.834660 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:06.834698 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:06.834708 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:06.834712 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:06.837310 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.334318 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:07.334341 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:07.334349 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:07.334354 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:07.337293 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.834294 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:07.834318 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:07.834332 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:07.834336 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:07.836888 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:07.837784 1415006 node_ready.go:53] node "ha-334765-m03" has status "Ready":"False"
	I0916 10:54:08.334459 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:08.334481 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:08.334495 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:08.334502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:08.337582 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:08.834924 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:08.834949 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:08.834958 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:08.834964 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:08.837781 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.335244 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:09.335271 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.335281 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.335285 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.338282 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.834676 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:09.834700 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.834711 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.834715 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.849261 1415006 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:54:09.854065 1415006 node_ready.go:49] node "ha-334765-m03" has status "Ready":"True"
	I0916 10:54:09.854092 1415006 node_ready.go:38] duration metric: took 38.020036174s for node "ha-334765-m03" to be "Ready" ...
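
The loop above is the node-readiness poll: a GET on the node object roughly every 500ms until its NodeReady condition flips to "True". A minimal client-go sketch of that pattern follows; it assumes a reachable cluster via the default kubeconfig, and waitNodeReady is an illustrative name, not minikube's actual helper in node_ready.go.

// Poll a node until its NodeReady condition reports "True", mirroring the
// ~500ms GET cadence visible in the log above.
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func waitNodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) error {
	ticker := time.NewTicker(500 * time.Millisecond)
	defer ticker.Stop()
	for {
		node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
		if err == nil {
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
					return nil // node now has status "Ready":"True"
				}
			}
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-ticker.C: // not Ready yet; retry on the next tick
		}
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	ctx, cancel := context.WithTimeout(context.Background(), 6*time.Minute)
	defer cancel()
	if err := waitNodeReady(ctx, cs, "ha-334765-m03"); err != nil {
		panic(err)
	}
	fmt.Println("node ready")
}
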
	I0916 10:54:09.854104 1415006 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:54:09.854175 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:09.854187 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.854196 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.854200 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.867206 1415006 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:54:09.877172 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.877276 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:54:09.877289 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.877298 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.877301 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.880381 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:09.881181 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.881246 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.881269 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.881285 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.883907 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.884653 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.884746 1415006 pod_ready.go:82] duration metric: took 7.469866ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.884760 1415006 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.884831 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:54:09.884846 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.884855 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.884860 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.887917 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:09.888873 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.888895 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.888904 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.888908 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.891637 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.892242 1415006 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.892283 1415006 pod_ready.go:82] duration metric: took 7.514525ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.892321 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.892421 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:54:09.892446 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.892481 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.892502 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.895188 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.896014 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:09.896037 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.896046 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.896051 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.898613 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.899255 1415006 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.899305 1415006 pod_ready.go:82] duration metric: took 6.957584ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.899334 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.899455 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:54:09.899482 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.899507 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.899539 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.902359 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.903536 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:09.903578 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:09.903616 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:09.903639 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:09.906380 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:09.907138 1415006 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:09.907159 1415006 pod_ready.go:82] duration metric: took 7.804151ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:09.907173 1415006 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.035614 1415006 request.go:632] Waited for 128.346187ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:54:10.035727 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:54:10.035768 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.035784 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.035791 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.039843 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:10.235249 1415006 request.go:632] Waited for 194.457282ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:10.235331 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:10.235337 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.235355 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.235375 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.238233 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:10.239108 1415006 pod_ready.go:93] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:10.239141 1415006 pod_ready.go:82] duration metric: took 331.959241ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
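
The "Waited for ... due to client-side throttling, not priority and fairness" lines above come from client-go's token-bucket rate limiter on the client, not from the apiserver: the burst of per-pod and per-node GETs exceeds the default budget, so requests are paced. A sketch of where those limits live, with illustrative values (client-go defaults are QPS=5, Burst=10):

package main

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	// Raising QPS/Burst on the rest.Config removes the artificial waits
	// logged above; 50/100 are illustrative values, not minikube's settings.
	cfg.QPS = 50
	cfg.Burst = 100
	_ = kubernetes.NewForConfigOrDie(cfg)
}
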
	I0916 10:54:10.239179 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.435280 1415006 request.go:632] Waited for 195.986876ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:54:10.435346 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:54:10.435353 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.435380 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.435410 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.438185 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:10.634844 1415006 request.go:632] Waited for 195.912753ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:10.634955 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:10.634984 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.635000 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.635005 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.638444 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:10.639291 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:10.639315 1415006 pod_ready.go:82] duration metric: took 400.119572ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.639327 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:10.835341 1415006 request.go:632] Waited for 195.940683ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:54:10.835478 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:54:10.835491 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:10.835500 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:10.835504 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:10.839135 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.035271 1415006 request.go:632] Waited for 195.339951ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:11.035412 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:11.035425 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.035437 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.035443 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.038915 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.039662 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.039683 1415006 pod_ready.go:82] duration metric: took 400.348678ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.039713 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.235305 1415006 request.go:632] Waited for 195.488895ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:54:11.235369 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:54:11.235376 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.235385 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.235393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.238269 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:11.435527 1415006 request.go:632] Waited for 196.346243ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:11.435600 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:11.435612 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.435621 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.435627 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.438790 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.439339 1415006 pod_ready.go:93] pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.439358 1415006 pod_ready.go:82] duration metric: took 399.629697ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.439371 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.635328 1415006 request.go:632] Waited for 195.882494ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:54:11.635388 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:54:11.635394 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.635403 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.635439 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.638604 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.834747 1415006 request.go:632] Waited for 195.26705ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:11.834844 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:11.834858 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:11.834868 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:11.834872 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:11.838076 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:11.839207 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:11.839242 1415006 pod_ready.go:82] duration metric: took 399.858516ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:11.839256 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.035143 1415006 request.go:632] Waited for 195.790983ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:54:12.035255 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:54:12.035270 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.035295 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.035308 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.038799 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:12.234779 1415006 request.go:632] Waited for 195.242846ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:12.234836 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:12.234846 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.234855 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.234865 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.237789 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:12.238627 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:12.238647 1415006 pod_ready.go:82] duration metric: took 399.383731ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.238659 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.435640 1415006 request.go:632] Waited for 196.863621ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:54:12.435728 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:54:12.435741 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.435750 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.435760 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.440030 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:12.635380 1415006 request.go:632] Waited for 194.362713ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:12.635440 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:12.635446 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.635454 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.635464 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.638081 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:12.638751 1415006 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:12.638770 1415006 pod_ready.go:82] duration metric: took 400.0763ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.638782 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:12.834723 1415006 request.go:632] Waited for 195.809084ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:54:12.834840 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:54:12.834876 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:12.834893 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:12.834899 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:12.838099 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:13.035554 1415006 request.go:632] Waited for 196.329366ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:13.035628 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:13.035637 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.035647 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.035652 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.038454 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.039118 1415006 pod_ready.go:93] pod "kube-proxy-4vsvh" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.039141 1415006 pod_ready.go:82] duration metric: took 400.351329ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.039169 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.235690 1415006 request.go:632] Waited for 196.449666ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:54:13.235755 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:54:13.235764 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.235783 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.235796 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.238489 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.435561 1415006 request.go:632] Waited for 196.222948ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:13.435623 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:13.435635 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.435645 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.435662 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.442733 1415006 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:54:13.443704 1415006 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.443726 1415006 pod_ready.go:82] duration metric: took 404.544453ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.443738 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.635099 1415006 request.go:632] Waited for 191.290889ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:54:13.635183 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:54:13.635194 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.635202 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.635213 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.638457 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:13.835588 1415006 request.go:632] Waited for 196.290122ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:13.835645 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:13.835655 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:13.835674 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:13.835686 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:13.838539 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:13.839403 1415006 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:13.839467 1415006 pod_ready.go:82] duration metric: took 395.719603ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:13.839486 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.034857 1415006 request.go:632] Waited for 195.30385ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:54:14.034956 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:54:14.034967 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.035001 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.035009 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.038346 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:14.235315 1415006 request.go:632] Waited for 196.258591ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:14.235402 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:54:14.235434 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.235450 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.235458 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.238407 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:14.239032 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:14.239059 1415006 pod_ready.go:82] duration metric: took 399.557224ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.239091 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.435529 1415006 request.go:632] Waited for 196.359683ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:54:14.435602 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:54:14.435611 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.435621 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.435628 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.438377 1415006 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:54:14.635344 1415006 request.go:632] Waited for 196.32153ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:14.635413 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:54:14.635422 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.635430 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.635440 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.638829 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:14.639457 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:14.639474 1415006 pod_ready.go:82] duration metric: took 400.371735ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.639486 1415006 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:14.835382 1415006 request.go:632] Waited for 195.82633ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:54:14.835464 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:54:14.835493 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:14.835508 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:14.835512 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:14.838577 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.034892 1415006 request.go:632] Waited for 195.597561ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:15.035037 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:54:15.035073 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.035096 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.035104 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.043345 1415006 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:54:15.044498 1415006 pod_ready.go:93] pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:54:15.044529 1415006 pod_ready.go:82] duration metric: took 405.03117ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:54:15.044549 1415006 pod_ready.go:39] duration metric: took 5.190434658s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
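
That concludes the "extra waiting" pass: for each system-critical label, the runner fetches the matching kube-system pods and requires their PodReady condition to be "True". A minimal sketch of the same check, with the label list copied from the log; the helper name podReady is illustrative, not minikube's pod_ready.go implementation:

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's PodReady condition is "True".
func podReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// System-critical labels, as listed in the log above.
	labels := []string{
		"k8s-app=kube-dns", "component=etcd", "component=kube-apiserver",
		"component=kube-controller-manager", "k8s-app=kube-proxy",
		"component=kube-scheduler",
	}
	for _, sel := range labels {
		pods, err := cs.CoreV1().Pods("kube-system").List(context.Background(),
			metav1.ListOptions{LabelSelector: sel})
		if err != nil {
			panic(err)
		}
		for i := range pods.Items {
			fmt.Printf("%-55s ready=%v\n", pods.Items[i].Name, podReady(&pods.Items[i]))
		}
	}
}
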
	I0916 10:54:15.044567 1415006 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:54:15.044658 1415006 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:54:15.065826 1415006 api_server.go:72] duration metric: took 43.466898987s to wait for apiserver process to appear ...
	I0916 10:54:15.065865 1415006 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:54:15.065890 1415006 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:54:15.081492 1415006 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
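
After confirming the kube-apiserver process over SSH (the pgrep above), the runner probes the unversioned /healthz path and expects the literal body "ok". A sketch of that probe through the clientset's REST client (assumes the default kubeconfig; not minikube's api_server.go code):

package main

import (
	"context"
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// GET /healthz on the apiserver; DoRaw returns the raw body, which is
	// "ok" when the server is healthy (a non-2xx status surfaces as err).
	body, err := cs.Discovery().RESTClient().Get().
		AbsPath("/healthz").
		DoRaw(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
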
	I0916 10:54:15.081586 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:54:15.081606 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.081617 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.081627 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.085787 1415006 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:54:15.085894 1415006 api_server.go:141] control plane version: v1.31.1
	I0916 10:54:15.085925 1415006 api_server.go:131] duration metric: took 20.052103ms to wait for apiserver health ...
	I0916 10:54:15.085936 1415006 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:54:15.235309 1415006 request.go:632] Waited for 149.268405ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.235368 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.235374 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.235384 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.235393 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.241237 1415006 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:54:15.250252 1415006 system_pods.go:59] 24 kube-system pods found
	I0916 10:54:15.250287 1415006 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:54:15.250295 1415006 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:54:15.250299 1415006 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:54:15.250304 1415006 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:54:15.250309 1415006 system_pods.go:61] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:54:15.250313 1415006 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:54:15.250317 1415006 system_pods.go:61] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:54:15.250322 1415006 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:54:15.250326 1415006 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:54:15.250331 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:54:15.250341 1415006 system_pods.go:61] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:54:15.250348 1415006 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:54:15.250357 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:54:15.250361 1415006 system_pods.go:61] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:54:15.250365 1415006 system_pods.go:61] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:54:15.250369 1415006 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:54:15.250373 1415006 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:54:15.250385 1415006 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:54:15.250389 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:54:15.250393 1415006 system_pods.go:61] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:54:15.250396 1415006 system_pods.go:61] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:54:15.250400 1415006 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:54:15.250404 1415006 system_pods.go:61] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:54:15.250407 1415006 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:54:15.250413 1415006 system_pods.go:74] duration metric: took 164.468004ms to wait for pod list to return data ...
	I0916 10:54:15.250420 1415006 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:54:15.435244 1415006 request.go:632] Waited for 184.740884ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:54:15.435302 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:54:15.435308 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.435317 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.435322 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.438665 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.438792 1415006 default_sa.go:45] found service account: "default"
	I0916 10:54:15.438811 1415006 default_sa.go:55] duration metric: took 188.382261ms for default service account to be created ...
	I0916 10:54:15.438821 1415006 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:54:15.635182 1415006 request.go:632] Waited for 196.288021ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.635243 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:54:15.635249 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.635259 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.635275 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.642044 1415006 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:54:15.651279 1415006 system_pods.go:86] 24 kube-system pods found
	I0916 10:54:15.651317 1415006 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:54:15.651325 1415006 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:54:15.651330 1415006 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:54:15.651336 1415006 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:54:15.651341 1415006 system_pods.go:89] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:54:15.651345 1415006 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:54:15.651350 1415006 system_pods.go:89] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:54:15.651354 1415006 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:54:15.651359 1415006 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:54:15.651369 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:54:15.651376 1415006 system_pods.go:89] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:54:15.651384 1415006 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:54:15.651389 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:54:15.651393 1415006 system_pods.go:89] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:54:15.651397 1415006 system_pods.go:89] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:54:15.651404 1415006 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:54:15.651408 1415006 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:54:15.651419 1415006 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:54:15.651423 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:54:15.651427 1415006 system_pods.go:89] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:54:15.651437 1415006 system_pods.go:89] "kube-vip-ha-334765" [65843776-1f0b-4a2b-b30c-62d55f497269] Running
	I0916 10:54:15.651440 1415006 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:54:15.651444 1415006 system_pods.go:89] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:54:15.651447 1415006 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:54:15.651455 1415006 system_pods.go:126] duration metric: took 212.62378ms to wait for k8s-apps to be running ...
	I0916 10:54:15.651465 1415006 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:54:15.651526 1415006 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:54:15.668882 1415006 system_svc.go:56] duration metric: took 17.401012ms WaitForService to wait for kubelet
	I0916 10:54:15.668913 1415006 kubeadm.go:582] duration metric: took 44.069992713s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:54:15.668930 1415006 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:54:15.835321 1415006 request.go:632] Waited for 166.314013ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:54:15.835384 1415006 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:54:15.835393 1415006 round_trippers.go:469] Request Headers:
	I0916 10:54:15.835402 1415006 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:54:15.835412 1415006 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:54:15.838719 1415006 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:54:15.839904 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839923 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839933 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839938 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839942 1415006 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:54:15.839947 1415006 node_conditions.go:123] node cpu capacity is 2
	I0916 10:54:15.839951 1415006 node_conditions.go:105] duration metric: took 171.01571ms to run NodePressure ...
	I0916 10:54:15.839962 1415006 start.go:241] waiting for startup goroutines ...
	I0916 10:54:15.839983 1415006 start.go:255] writing updated cluster config ...
	I0916 10:54:15.840309 1415006 ssh_runner.go:195] Run: rm -f paused
	I0916 10:54:15.848531 1415006 out.go:177] * Done! kubectl is now configured to use "ha-334765" cluster and "default" namespace by default
	E0916 10:54:15.851249 1415006 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
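
Note: the "exec format error" above is the same failure that aborts every kubectl-dependent test in this report; on this arm64 runner it almost always means the binary at /usr/local/bin/kubectl was built for a different architecture. A minimal check-and-replace sketch, assuming the official dl.k8s.io download layout and pinning the v1.31.1 control-plane version reported above purely as an example:

  uname -m                        # expect aarch64 on this runner
  file /usr/local/bin/kubectl     # an "x86-64" ELF here confirms the mismatch
  # replace with the matching arm64 build (assumed fix, version pinned as an example)
  curl -LO "https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl"
  sudo install -m 0755 kubectl /usr/local/bin/kubectl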
	
	
	==> CRI-O <==
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.519999141Z" level=info msg="Ran pod sandbox 58def518e92ee8a0fbdff450f0b2d89cb0c01f90c327d847d65637cb8734d5ef with infra container: kube-system/coredns-7c65d6cfc9-s9fp9/POD" id=b97bb72b-3885-4af7-928b-0a634327ae92 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.522707339Z" level=info msg="Ran pod sandbox 001c841a1318492ebe49fd68b6823f248801c7a32495817dbb43f485bef2bd21 with infra container: kube-system/storage-provisioner/POD" id=59aa4d27-56a5-43cb-81fe-847651495177 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.525885728Z" level=info msg="Checking image status: registry.k8s.io/coredns/coredns:v1.11.3" id=25186bff-a43f-4ae0-bdca-828d22fa3027 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526109533Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4,RepoTags:[registry.k8s.io/coredns/coredns:v1.11.3],RepoDigests:[registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6 registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e],Size_:61647114,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=25186bff-a43f-4ae0-bdca-828d22fa3027 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526333946Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=6769606b-7ada-4f83-9512-2a827d754b53 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.526525621Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=6769606b-7ada-4f83-9512-2a827d754b53 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527705160Z" level=info msg="Checking image status: registry.k8s.io/coredns/coredns:v1.11.3" id=5792ef5a-f8a6-4c30-98ce-8a1d8627905f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527885176Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4,RepoTags:[registry.k8s.io/coredns/coredns:v1.11.3],RepoDigests:[registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6 registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e],Size_:61647114,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=5792ef5a-f8a6-4c30-98ce-8a1d8627905f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.527977917Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=52ff5be7-7636-43cb-ac60-9ae276cfae05 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.528113946Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=52ff5be7-7636-43cb-ac60-9ae276cfae05 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529330186Z" level=info msg="Creating container: kube-system/coredns-7c65d6cfc9-s9fp9/coredns" id=279c9e3d-b080-49c2-b226-0fda1393d0ec name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529432961Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.529985636Z" level=info msg="Creating container: kube-system/storage-provisioner/storage-provisioner" id=5f70674d-8771-49d2-85b5-96eb9e34fb96 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.530062664Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.568868882Z" level=warning msg="Failed to open /etc/passwd: open /var/lib/containers/storage/overlay/38e307f07a96e8cb0e3f723de2521d79276944e6b03e4c010ba60f4188b9044d/merged/etc/passwd: no such file or directory"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.569202847Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/38e307f07a96e8cb0e3f723de2521d79276944e6b03e4c010ba60f4188b9044d/merged/etc/group: no such file or directory"
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.625291958Z" level=info msg="Created container e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1: kube-system/coredns-7c65d6cfc9-s9fp9/coredns" id=279c9e3d-b080-49c2-b226-0fda1393d0ec name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.626422350Z" level=info msg="Starting container: e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1" id=52fc3345-1e73-4dc6-986e-f1b6156a351f name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.643361825Z" level=info msg="Created container b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d: kube-system/coredns-7c65d6cfc9-q5xr7/coredns" id=1294853a-0d22-434b-b838-17f1e72ebfae name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.644135237Z" level=info msg="Starting container: b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d" id=41b1f1a1-a94b-41e0-8d20-44f1215c9eda name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.646585350Z" level=info msg="Created container adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd: kube-system/storage-provisioner/storage-provisioner" id=5f70674d-8771-49d2-85b5-96eb9e34fb96 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.647109808Z" level=info msg="Starting container: adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd" id=997cd3e4-00a7-4c11-8fb4-5acb1d0d1593 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.652753414Z" level=info msg="Started container" PID=2095 containerID=e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1 description=kube-system/coredns-7c65d6cfc9-s9fp9/coredns id=52fc3345-1e73-4dc6-986e-f1b6156a351f name=/runtime.v1.RuntimeService/StartContainer sandboxID=58def518e92ee8a0fbdff450f0b2d89cb0c01f90c327d847d65637cb8734d5ef
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.666247702Z" level=info msg="Started container" PID=2101 containerID=adc8f9784b1ec4d53b7c9ba64a0d20f459c9e1d35382e118dd0e8702427c3cfd description=kube-system/storage-provisioner/storage-provisioner id=997cd3e4-00a7-4c11-8fb4-5acb1d0d1593 name=/runtime.v1.RuntimeService/StartContainer sandboxID=001c841a1318492ebe49fd68b6823f248801c7a32495817dbb43f485bef2bd21
	Sep 16 10:52:40 ha-334765 crio[979]: time="2024-09-16 10:52:40.673868062Z" level=info msg="Started container" PID=2076 containerID=b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d description=kube-system/coredns-7c65d6cfc9-q5xr7/coredns id=41b1f1a1-a94b-41e0-8d20-44f1215c9eda name=/runtime.v1.RuntimeService/StartContainer sandboxID=fcf9d17c32966868a5889905be4475a5c0e703c6754eaf0e3a6a870b0986519b
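
Note: the CRI-O entries above cover the full create path for the coredns and storage-provisioner containers (sandbox run, image status check, create, start). To cross-check the runtime's view of the same containers directly on the node, the standard crictl calls suffice; a sketch, run on the node (e.g. via minikube ssh), with container IDs abbreviated as in the status table below:

  sudo crictl ps --name coredns      # running coredns containers
  sudo crictl logs e1bb424abe078     # container logs by ID prefix
  sudo crictl inspect adc8f9784b1ec  # full runtime state/config as JSON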
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	adc8f9784b1ec       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                    3 minutes ago       Running             storage-provisioner       0                   001c841a13184       storage-provisioner
	e1bb424abe078       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                    3 minutes ago       Running             coredns                   0                   58def518e92ee       coredns-7c65d6cfc9-s9fp9
	b39228db2d4d8       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                    3 minutes ago       Running             coredns                   0                   fcf9d17c32966       coredns-7c65d6cfc9-q5xr7
	4e367c2e592f9       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                    4 minutes ago       Running             kindnet-cni               0                   1e25f226ecf5a       kindnet-7s5t5
	e1979b8578120       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                    4 minutes ago       Running             kube-proxy                0                   facacbf959961       kube-proxy-tlfs7
	8dfb86491b77a       ghcr.io/kube-vip/kube-vip@sha256:360f0c5d02322075cc80edb9e4e0d2171e941e55072184f1f902203fafc81d0f   4 minutes ago       Running             kube-vip                  0                   afc2dcca571a8       kube-vip-ha-334765
	942911c4142a5       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                    4 minutes ago       Running             kube-controller-manager   0                   a2380692f5f7e       kube-controller-manager-ha-334765
	42f82617ee823       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                    4 minutes ago       Running             kube-apiserver            0                   4b932f715d94f       kube-apiserver-ha-334765
	04fb33f068e50       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                    4 minutes ago       Running             kube-scheduler            0                   9743c9508108d       kube-scheduler-ha-334765
	87df03de85a8a       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                    4 minutes ago       Running             etcd                      0                   eb6b8de72187f       etcd-ha-334765
	
	
	==> coredns [b39228db2d4d897b215a20c448db60b4d17c171274e5a653470b7613c63ab22d] <==
	[INFO] 10.244.2.2:60718 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001988979s
	[INFO] 10.244.2.2:60533 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00017378s
	[INFO] 10.244.2.3:49282 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001552216s
	[INFO] 10.244.2.3:43574 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.00119709s
	[INFO] 10.244.2.3:56190 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.00012963s
	[INFO] 10.244.2.3:34905 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000217717s
	[INFO] 10.244.1.2:51860 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000199527s
	[INFO] 10.244.1.2:35957 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000152029s
	[INFO] 10.244.1.2:55907 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000096942s
	[INFO] 10.244.1.2:50978 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000068675s
	[INFO] 10.244.1.2:51125 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000129671s
	[INFO] 10.244.2.2:60214 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000138237s
	[INFO] 10.244.2.2:45302 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000086324s
	[INFO] 10.244.2.2:45998 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000103053s
	[INFO] 10.244.2.3:42017 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000096736s
	[INFO] 10.244.2.3:35753 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000071342s
	[INFO] 10.244.1.2:40346 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000096064s
	[INFO] 10.244.1.2:40215 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000063826s
	[INFO] 10.244.1.2:43106 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000085905s
	[INFO] 10.244.2.2:45784 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000096999s
	[INFO] 10.244.2.3:36002 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000099427s
	[INFO] 10.244.2.3:38212 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000105285s
	[INFO] 10.244.2.3:45229 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000091272s
	[INFO] 10.244.2.3:44135 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000091395s
	[INFO] 10.244.1.2:42055 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000083707s
	
	
	==> coredns [e1bb424abe07892ff88db2f7855ccf353bfd83d107d295aff42c09caa6cf6ac1] <==
	[INFO] 10.244.2.2:44124 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 60 0.002159321s
	[INFO] 10.244.2.2:58166 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.086819453s
	[INFO] 10.244.1.2:43380 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000269975s
	[INFO] 10.244.1.2:50566 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd,ra 31 0.000084445s
	[INFO] 10.244.2.2:60735 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000146342s
	[INFO] 10.244.2.2:55629 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.003742478s
	[INFO] 10.244.2.2:58855 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000236597s
	[INFO] 10.244.2.2:49910 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000148123s
	[INFO] 10.244.2.3:57067 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000182543s
	[INFO] 10.244.2.3:59470 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000231354s
	[INFO] 10.244.2.3:35175 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000359483s
	[INFO] 10.244.2.3:58611 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00017314s
	[INFO] 10.244.1.2:47364 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001674788s
	[INFO] 10.244.1.2:34355 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000164033s
	[INFO] 10.244.1.2:39537 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001387132s
	[INFO] 10.244.2.2:57450 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150922s
	[INFO] 10.244.2.3:47756 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000109207s
	[INFO] 10.244.2.3:49471 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000084264s
	[INFO] 10.244.1.2:51431 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000153063s
	[INFO] 10.244.2.2:35440 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000221902s
	[INFO] 10.244.2.2:38547 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000131312s
	[INFO] 10.244.2.2:58642 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.00016226s
	[INFO] 10.244.1.2:48380 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000230402s
	[INFO] 10.244.1.2:40950 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000091698s
	[INFO] 10.244.1.2:41025 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000084191s
	
	
	==> describe nodes <==
	Name:               ha-334765
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:56:32 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:52:40 +0000   Mon, 16 Sep 2024 10:52:40 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-334765
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 87dc8482c7ba4d99a2731913dc3dcad5
	  System UUID:                15c23ccf-7aa3-4a1a-8aeb-2a833bffb1e5
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-q5xr7             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     4m38s
	  kube-system                 coredns-7c65d6cfc9-s9fp9             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     4m38s
	  kube-system                 etcd-ha-334765                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m41s
	  kube-system                 kindnet-7s5t5                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m38s
	  kube-system                 kube-apiserver-ha-334765             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4m41s
	  kube-system                 kube-controller-manager-ha-334765    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m41s
	  kube-system                 kube-proxy-tlfs7                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m38s
	  kube-system                 kube-scheduler-ha-334765             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m41s
	  kube-system                 kube-vip-ha-334765                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m41s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m36s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 4m36s                  kube-proxy       
	  Normal   NodeHasNoDiskPressure    4m51s (x8 over 4m51s)  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m51s (x7 over 4m51s)  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   Starting                 4m51s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 4m51s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m51s (x8 over 4m51s)  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   Starting                 4m41s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 4m41s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m41s                  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m41s                  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m41s                  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           4m39s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           4m7s                   node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   NodeReady                3m56s                  kubelet          Node ha-334765 status is now: NodeReady
	  Normal   RegisteredNode           2m59s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	
	
	Name:               ha-334765-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:19 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:56:30 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:56:30 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:56:30 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:56:30 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:56:30 +0000   Mon, 16 Sep 2024 10:53:02 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-334765-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 9f584a856bc842fb9a69f9552fe6d888
	  System UUID:                aea91ea0-3fb3-4815-9747-a2bcb9506f24
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-tczms                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m19s
	  kube-system                 etcd-ha-334765-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m16s
	  kube-system                 kindnet-vj27j                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m17s
	  kube-system                 kube-apiserver-ha-334765-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4m16s
	  kube-system                 kube-controller-manager-ha-334765-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m16s
	  kube-system                 kube-proxy-l998t                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m17s
	  kube-system                 kube-scheduler-ha-334765-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m16s
	  kube-system                 kube-vip-ha-334765-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m13s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 4m9s                   kube-proxy       
	  Normal   NodeHasSufficientMemory  4m17s (x8 over 4m17s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m17s (x8 over 4m17s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m17s (x7 over 4m17s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           4m14s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           4m7s                   node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           2m59s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   Starting                 26s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 26s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  26s (x8 over 26s)      kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    26s (x8 over 26s)      kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     26s (x7 over 26s)      kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	
	
	Name:               ha-334765-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_53_31_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:53:26 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:56:30 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:53:26 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:54:27 +0000   Mon, 16 Sep 2024 10:54:09 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.4
	  Hostname:    ha-334765-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 d5114246fab6471585539c19d438fdb6
	  System UUID:                f0fee577-b975-499e-b5ad-667fb8443848
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-mbfkp                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m20s
	  default                     busybox-7dff88458-mh2kc                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m20s
	  kube-system                 etcd-ha-334765-m03                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m8s
	  kube-system                 kindnet-rfw69                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m10s
	  kube-system                 kube-apiserver-ha-334765-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m8s
	  kube-system                 kube-controller-manager-ha-334765-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m6s
	  kube-system                 kube-proxy-4vsvh                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m10s
	  kube-system                 kube-scheduler-ha-334765-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m4s
	  kube-system                 kube-vip-ha-334765-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m6s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 3m2s                   kube-proxy       
	  Normal  NodeHasSufficientMemory  3m10s (x8 over 3m10s)  kubelet          Node ha-334765-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    3m10s (x8 over 3m10s)  kubelet          Node ha-334765-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     3m10s (x7 over 3m10s)  kubelet          Node ha-334765-m03 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           3m9s                   node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	  Normal  RegisteredNode           3m6s                   node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	  Normal  RegisteredNode           2m59s                  node-controller  Node ha-334765-m03 event: Registered Node ha-334765-m03 in Controller
	
	
	Name:               ha-334765-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_43_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:56:35 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:54:42 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:55:25 +0000   Mon, 16 Sep 2024 10:55:25 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-334765-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 4dd286f9ed0c4b2ea7ffee02c4c1d337
	  System UUID:                2ce236e7-eff0-4b96-a330-3e2c709a50e7
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-plxdg       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      114s
	  kube-system                 kube-proxy-br496    0 (0%)        0 (0%)      0 (0%)           0 (0%)         114s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 111s                 kube-proxy       
	  Normal   RegisteredNode           114s                 node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Warning  CgroupV1                 114s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  114s (x2 over 114s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    114s (x2 over 114s)  kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     114s (x2 over 114s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           111s                 node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           109s                 node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeReady                71s                  kubelet          Node ha-334765-m04 status is now: NodeReady
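
Note: the four node blocks above reflect kubelet/apiserver state at dump time; once a matching-architecture kubectl is in place (see the note after the start log), the same view is reproducible with ordinary commands, for example:

  kubectl get nodes -o wide          # one-line summary per node
  kubectl describe node ha-334765    # the full block shown above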
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [87df03de85a8a5a35eeb988107885097dc0af1971560276e2869960289fc36f0] <==
	{"level":"info","ts":"2024-09-16T10:56:18.242653Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T10:56:18.317399Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"1ee24603fd50eda8","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:56:18.317442Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"warn","ts":"2024-09-16T10:56:18.353964Z","caller":"rafthttp/peer_status.go:66","msg":"peer became inactive (message send to peer failed)","peer-id":"1ee24603fd50eda8","error":"failed to write 1ee24603fd50eda8 on pipeline (read tcp 192.168.49.2:37618->192.168.49.3:2380: read: connection reset by peer)"}
	{"level":"warn","ts":"2024-09-16T10:56:18.355331Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8","error":"unexpected EOF"}
	{"level":"warn","ts":"2024-09-16T10:56:18.356078Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8","error":"unexpected EOF"}
	{"level":"warn","ts":"2024-09-16T10:56:18.465128Z","caller":"rafthttp/stream.go:223","msg":"lost TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"warn","ts":"2024-09-16T10:56:20.478842Z","caller":"rafthttp/stream.go:194","msg":"lost TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"warn","ts":"2024-09-16T10:56:21.953275Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:21.953332Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:25.485322Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"1ee24603fd50eda8","rtt":"21.960422ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:25.485331Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"1ee24603fd50eda8","rtt":"3.715511ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:25.955039Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:25.955173Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:29.956567Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:29.956620Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"1ee24603fd50eda8","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:30.485790Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"1ee24603fd50eda8","rtt":"3.715511ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:56:30.485859Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"1ee24603fd50eda8","rtt":"21.960422ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"info","ts":"2024-09-16T10:56:32.931902Z","caller":"rafthttp/peer_status.go:53","msg":"peer became active","peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T10:56:32.941079Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T10:56:32.975808Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T10:56:33.380626Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"1ee24603fd50eda8","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:56:33.380669Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T10:56:33.654464Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"1ee24603fd50eda8","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:56:33.654672Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	
	
	==> kernel <==
	 10:56:36 up 10:39,  0 users,  load average: 3.26, 2.21, 1.92
	Linux ha-334765 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [4e367c2e592f9aea5b6f808fe6b2f319782cc7f486aef441ea9eccd8a2234ceb] <==
	I0916 10:55:59.622768       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:56:09.626735       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:56:09.626769       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:56:09.626878       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:56:09.626891       1 main.go:299] handling current node
	I0916 10:56:09.626905       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:56:09.626911       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:56:09.626955       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:56:09.626965       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:56:19.630799       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:56:19.630834       1 main.go:299] handling current node
	I0916 10:56:19.630852       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:56:19.630859       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:56:19.630963       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:56:19.630976       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:56:19.631021       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:56:19.631033       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:56:29.630837       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:56:29.630874       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:56:29.630996       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:56:29.631010       1 main.go:299] handling current node
	I0916 10:56:29.631023       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:56:29.631028       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:56:29.631077       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:56:29.631087       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	
	
	==> kube-apiserver [42f82617ee823d573b9d4f28daf99d7e25b6909d4243e3187869a02bdce9fdff] <==
	I0916 10:51:52.062598       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:51:52.734853       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:51:52.790464       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:52.897833       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:51:52.905594       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:51:52.906850       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:52.912174       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:53.112482       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:51:55.796509       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:51:55.839773       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:51:55.858150       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:51:58.517329       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0916 10:51:58.779036       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0916 10:54:24.784781       1 watch.go:250] "Unhandled Error" err="http2: stream closed" logger="UnhandledError"
	E0916 10:54:25.822674       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:41276: use of closed network connection
	E0916 10:54:26.081024       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56708: use of closed network connection
	E0916 10:54:26.313044       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56730: use of closed network connection
	E0916 10:54:26.827467       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56778: use of closed network connection
	E0916 10:54:27.093710       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56798: use of closed network connection
	E0916 10:54:27.327051       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56820: use of closed network connection
	E0916 10:54:27.554546       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56844: use of closed network connection
	E0916 10:54:27.987173       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56870: use of closed network connection
	E0916 10:54:28.225420       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56880: use of closed network connection
	E0916 10:54:28.678732       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56908: use of closed network connection
	E0916 10:54:28.913617       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:56920: use of closed network connection
	
	
	==> kube-controller-manager [942911c4142a59d4fc2b1d92ba267126bbdb629387bc6b3fa725bafa1a1d00d1] <==
	I0916 10:54:27.680828       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m03"
	E0916 10:54:42.064745       1 certificate_controller.go:151] "Unhandled Error" err="Sync csr-xwk47 failed with : error updating signature for csr: Operation cannot be fulfilled on certificatesigningrequests.certificates.k8s.io \"csr-xwk47\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:54:42.353962       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"ha-334765-m04\" does not exist"
	I0916 10:54:42.406194       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="ha-334765-m04" podCIDRs=["10.244.3.0/24"]
	I0916 10:54:42.406304       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.406358       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.422131       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.779654       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:42.858731       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-334765-m04"
	I0916 10:54:42.947669       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:43.315307       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:45.387280       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:45.427214       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:47.156328       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:47.224629       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:54:52.113885       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m02"
	I0916 10:54:52.775659       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.183913       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.184407       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-334765-m04"
	I0916 10:55:25.213152       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:55:25.412910       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:56:30.244096       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m02"
	I0916 10:56:31.671768       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="77.11297ms"
	I0916 10:56:31.671945       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="67.592µs"
	I0916 10:56:32.789257       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="45.981µs"
	
	
	==> kube-proxy [e1979b857812014745feb8baa7c2bc7b3750644c2185150532d37f3bf6389742] <==
	I0916 10:51:59.283351       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:51:59.600524       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:51:59.600625       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:51:59.738471       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:51:59.776540       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:51:59.805947       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:51:59.806378       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:51:59.806546       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:51:59.807688       1 config.go:199] "Starting service config controller"
	I0916 10:51:59.807772       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:51:59.807831       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:51:59.807860       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:51:59.808420       1 config.go:328] "Starting node config controller"
	I0916 10:51:59.837473       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:51:59.908782       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:51:59.908839       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:51:59.941440       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [04fb33f068e50df557ad2766e0b9f19ce855957bef36f74835dedc91014730a4] <==
	E0916 10:52:40.159246       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"coredns-7c65d6cfc9-s9fp9\": pod coredns-7c65d6cfc9-s9fp9 is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="kube-system/coredns-7c65d6cfc9-s9fp9" node="ha-334765"
	E0916 10:52:40.159345       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 0e29200a-0909-47e1-8521-bf5f9b645d6c(kube-system/coredns-7c65d6cfc9-s9fp9) wasn't assumed so cannot be forgotten" pod="kube-system/coredns-7c65d6cfc9-s9fp9"
	E0916 10:52:40.159399       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"coredns-7c65d6cfc9-s9fp9\": pod coredns-7c65d6cfc9-s9fp9 is already assigned to node \"ha-334765\"" pod="kube-system/coredns-7c65d6cfc9-s9fp9"
	I0916 10:52:40.159422       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/coredns-7c65d6cfc9-s9fp9" node="ha-334765"
	E0916 10:52:40.163345       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"storage-provisioner\": pod storage-provisioner is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="kube-system/storage-provisioner" node="ha-334765"
	E0916 10:52:40.163500       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"storage-provisioner\": pod storage-provisioner is already assigned to node \"ha-334765\"" pod="kube-system/storage-provisioner"
	E0916 10:53:26.592667       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-rfw69\": pod kindnet-rfw69 is already assigned to node \"ha-334765-m03\"" plugin="DefaultBinder" pod="kube-system/kindnet-rfw69" node="ha-334765-m03"
	E0916 10:53:26.592854       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 396f204a-53ea-4720-85fc-05ba54d285ca(kube-system/kindnet-rfw69) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-rfw69"
	E0916 10:53:26.592899       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-rfw69\": pod kindnet-rfw69 is already assigned to node \"ha-334765-m03\"" pod="kube-system/kindnet-rfw69"
	I0916 10:53:26.592943       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-rfw69" node="ha-334765-m03"
	I0916 10:54:17.119262       1 cache.go:503] "Pod was added to a different node than it was assumed" podKey="d1ebb37a-bf5c-499a-b26a-5fb9e3076c6a" pod="default/busybox-7dff88458-mh2kc" assumedNode="ha-334765-m03" currentNode="ha-334765-m02"
	E0916 10:54:17.160604       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-mh2kc\": pod busybox-7dff88458-mh2kc is already assigned to node \"ha-334765-m03\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-mh2kc" node="ha-334765-m02"
	E0916 10:54:17.160660       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod d1ebb37a-bf5c-499a-b26a-5fb9e3076c6a(default/busybox-7dff88458-mh2kc) was assumed on ha-334765-m02 but assigned to ha-334765-m03" pod="default/busybox-7dff88458-mh2kc"
	E0916 10:54:17.161445       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-mh2kc\": pod busybox-7dff88458-mh2kc is already assigned to node \"ha-334765-m03\"" pod="default/busybox-7dff88458-mh2kc"
	I0916 10:54:17.161498       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-mh2kc" node="ha-334765-m03"
	E0916 10:54:17.196982       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-jnlww\": pod busybox-7dff88458-jnlww is already assigned to node \"ha-334765-m02\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-jnlww" node="ha-334765-m02"
	E0916 10:54:17.197090       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-jnlww\": pod busybox-7dff88458-jnlww is already assigned to node \"ha-334765-m02\"" pod="default/busybox-7dff88458-jnlww"
	E0916 10:54:17.197808       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-89hpp\": pod busybox-7dff88458-89hpp is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-89hpp" node="ha-334765"
	E0916 10:54:17.197870       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-89hpp\": pod busybox-7dff88458-89hpp is already assigned to node \"ha-334765\"" pod="default/busybox-7dff88458-89hpp"
	E0916 10:54:42.504006       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-br496\": pod kube-proxy-br496 is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-br496" node="ha-334765-m04"
	E0916 10:54:42.504170       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-br496\": pod kube-proxy-br496 is already assigned to node \"ha-334765-m04\"" pod="kube-system/kube-proxy-br496"
	E0916 10:54:42.565201       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-plxdg\": pod kindnet-plxdg is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-plxdg" node="ha-334765-m04"
	E0916 10:54:42.565280       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-plxdg\": pod kindnet-plxdg is already assigned to node \"ha-334765-m04\"" pod="kube-system/kindnet-plxdg"
	E0916 10:54:42.730269       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-4tn75\": pod kube-proxy-4tn75 is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-4tn75" node="ha-334765-m04"
	E0916 10:54:42.730443       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-4tn75\": pod kube-proxy-4tn75 is already assigned to node \"ha-334765-m04\"" pod="kube-system/kube-proxy-4tn75"
	
	
	==> kubelet <==
	Sep 16 10:54:35 ha-334765 kubelet[1577]: E0916 10:54:35.857867    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484075857635600,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:45 ha-334765 kubelet[1577]: E0916 10:54:45.858881    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484085858695976,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:45 ha-334765 kubelet[1577]: E0916 10:54:45.858916    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484085858695976,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:55 ha-334765 kubelet[1577]: E0916 10:54:55.869218    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484095868808670,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:54:55 ha-334765 kubelet[1577]: E0916 10:54:55.869262    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484095868808670,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:05 ha-334765 kubelet[1577]: E0916 10:55:05.870908    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484105870695517,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:05 ha-334765 kubelet[1577]: E0916 10:55:05.870946    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484105870695517,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:15 ha-334765 kubelet[1577]: E0916 10:55:15.872457    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484115872243215,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:15 ha-334765 kubelet[1577]: E0916 10:55:15.872498    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484115872243215,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:25 ha-334765 kubelet[1577]: E0916 10:55:25.873717    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484125873489809,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:25 ha-334765 kubelet[1577]: E0916 10:55:25.873757    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484125873489809,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:35 ha-334765 kubelet[1577]: E0916 10:55:35.878879    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484135878520118,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:35 ha-334765 kubelet[1577]: E0916 10:55:35.878935    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484135878520118,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:45 ha-334765 kubelet[1577]: E0916 10:55:45.880953    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484145880640593,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:45 ha-334765 kubelet[1577]: E0916 10:55:45.880997    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484145880640593,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:55 ha-334765 kubelet[1577]: E0916 10:55:55.882541    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484155882316367,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:55:55 ha-334765 kubelet[1577]: E0916 10:55:55.882579    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484155882316367,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:05 ha-334765 kubelet[1577]: E0916 10:56:05.884378    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484165884160275,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:05 ha-334765 kubelet[1577]: E0916 10:56:05.884416    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484165884160275,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:15 ha-334765 kubelet[1577]: E0916 10:56:15.886468    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484175886212321,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:15 ha-334765 kubelet[1577]: E0916 10:56:15.886512    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484175886212321,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:25 ha-334765 kubelet[1577]: E0916 10:56:25.888410    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484185888193195,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:25 ha-334765 kubelet[1577]: E0916 10:56:25.888451    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484185888193195,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:35 ha-334765 kubelet[1577]: E0916 10:56:35.890375    1577 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484195890156628,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:56:35 ha-334765 kubelet[1577]: E0916 10:56:35.890422    1577 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484195890156628,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-334765 -n ha-334765
helpers_test.go:261: (dbg) Run:  kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (1.125633ms)
helpers_test.go:263: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/RestartSecondaryNode (29.54s)
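
Every kubectl invocation in this run fails with "fork/exec /usr/local/bin/kubectl: exec format error" before it ever reaches the cluster. On Linux that error means the kernel rejected the executable format of the binary itself, and on an arm64 runner like this one the usual cause is an amd64 kubectl installed at that path. A minimal check, assuming the path from the failure message and the v1.31.1 version that kube-proxy reports above:

    # Compare the kubectl binary's target architecture with the host's.
    file /usr/local/bin/kubectl    # should say "ARM aarch64" on this runner
    uname -m                       # aarch64

    # If they disagree, the linux/arm64 build from the official release
    # mirror should clear every "exec format error" in this report:
    curl -LO https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl

The out/minikube-linux-arm64 binary keeps working throughout the run, which points at the kubectl install rather than the cluster.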

                                                
                                    
TestMultiControlPlane/serial/DeleteSecondaryNode (17.31s)

=== RUN   TestMultiControlPlane/serial/DeleteSecondaryNode
ha_test.go:487: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 node delete m03 -v=7 --alsologtostderr
ha_test.go:487: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 node delete m03 -v=7 --alsologtostderr: (12.631897663s)
ha_test.go:493: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:511: (dbg) Run:  kubectl get nodes
ha_test.go:511: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (3.201347ms)
ha_test.go:513: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
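
The node delete itself completes in under 13 seconds; only the follow-up "kubectl get nodes" assertion trips over the same foreign-architecture kubectl. While the host binary is broken, a stopgap check against this profile could go through the client that minikube bundles (a workaround sketch, not what the harness runs):

    # Bypass the broken /usr/local/bin/kubectl with minikube's bundled client:
    out/minikube-linux-arm64 -p ha-334765 kubectl -- get nodes

The harness resolves plain kubectl from PATH, so fixing the installed binary (see the note above) is what actually unblocks these tests.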
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/DeleteSecondaryNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-334765
helpers_test.go:235: (dbg) docker inspect ha-334765:

-- stdout --
	[
	    {
	        "Id": "471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5",
	        "Created": "2024-09-16T10:51:30.912390622Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1434681,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:57:03.349766268Z",
	            "FinishedAt": "2024-09-16T10:57:02.58436028Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hostname",
	        "HostsPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hosts",
	        "LogPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5-json.log",
	        "Name": "/ha-334765",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "ha-334765:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-334765",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/merged",
	                "UpperDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/diff",
	                "WorkDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "ha-334765",
	                "Source": "/var/lib/docker/volumes/ha-334765/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-334765",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-334765",
	                "name.minikube.sigs.k8s.io": "ha-334765",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "eec0c0a37a21d7b1df2530a7dafd75683b93af5503432cc69d4070bb6b0da7e5",
	            "SandboxKey": "/var/run/docker/netns/eec0c0a37a21",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34643"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34644"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34647"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34645"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34646"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-334765": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "a49e1846148d74f15aa5bd587e5d2d6b8a3c4246e7c45cf081cf9063a160d645",
	                    "EndpointID": "af35eb8905c592dd84a5abf380c803882cf355921d9383f8f0ff22ffa149e939",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-334765",
	                        "471d2d625f18"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-334765 -n ha-334765
helpers_test.go:244: <<< TestMultiControlPlane/serial/DeleteSecondaryNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/DeleteSecondaryNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 logs -n 25: (2.226421378s)
helpers_test.go:252: TestMultiControlPlane/serial/DeleteSecondaryNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m02 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m04 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp testdata/cp-test.txt                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765:/home/docker/cp-test_ha-334765-m04_ha-334765.txt                       |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765 sudo cat                                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765.txt                                 |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m02:/home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m02 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03:/home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m03 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-334765 node stop m02 -v=7                                                     | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-334765 node start m02 -v=7                                                    | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-334765 -v=7                                                           | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC |                     |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | -p ha-334765 -v=7                                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-334765 --wait=true -v=7                                                    | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC | 16 Sep 24 10:59 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-334765                                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:59 UTC |                     |
	| node    | ha-334765 node delete m03 -v=7                                                   | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:59 UTC | 16 Sep 24 11:00 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:57:02
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:57:02.868884 1434484 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:57:02.869013 1434484 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:57:02.869024 1434484 out.go:358] Setting ErrFile to fd 2...
	I0916 10:57:02.869029 1434484 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:57:02.869288 1434484 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:57:02.869668 1434484 out.go:352] Setting JSON to false
	I0916 10:57:02.870549 1434484 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38368,"bootTime":1726445855,"procs":157,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:57:02.870625 1434484 start.go:139] virtualization:  
	I0916 10:57:02.873982 1434484 out.go:177] * [ha-334765] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:57:02.877370 1434484 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:57:02.877427 1434484 notify.go:220] Checking for updates...
	I0916 10:57:02.883865 1434484 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:57:02.886508 1434484 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:57:02.889076 1434484 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:57:02.891635 1434484 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:57:02.894244 1434484 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:57:02.897530 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:02.897620 1434484 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:57:02.926179 1434484 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:57:02.926297 1434484 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:57:02.984604 1434484 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:4 ContainersRunning:0 ContainersPaused:0 ContainersStopped:4 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:26 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:57:02.974508021 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:57:02.984739 1434484 docker.go:318] overlay module found
	I0916 10:57:02.987750 1434484 out.go:177] * Using the docker driver based on existing profile
	I0916 10:57:02.990470 1434484 start.go:297] selected driver: docker
	I0916 10:57:02.990491 1434484 start.go:901] validating driver "docker" against &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:57:02.990692 1434484 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:57:02.990799 1434484 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:57:03.051533 1434484 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:4 ContainersRunning:0 ContainersPaused:0 ContainersStopped:4 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:26 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 10:57:03.041536554 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:57:03.051991 1434484 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:57:03.052038 1434484 cni.go:84] Creating CNI manager for ""
	I0916 10:57:03.052093 1434484 cni.go:136] multinode detected (4 nodes found), recommending kindnet
	I0916 10:57:03.052145 1434484 start.go:340] cluster config:
	{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:57:03.056465 1434484 out.go:177] * Starting "ha-334765" primary control-plane node in "ha-334765" cluster
	I0916 10:57:03.059033 1434484 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:57:03.061748 1434484 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:57:03.064274 1434484 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:57:03.064332 1434484 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:57:03.064345 1434484 cache.go:56] Caching tarball of preloaded images
	I0916 10:57:03.064359 1434484 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:57:03.064446 1434484 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:57:03.064457 1434484 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:57:03.064604 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:57:03.082660 1434484 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:57:03.082685 1434484 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:57:03.082764 1434484 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:57:03.082787 1434484 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:57:03.082792 1434484 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:57:03.082809 1434484 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:57:03.082815 1434484 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:57:03.084363 1434484 image.go:273] response: 
	I0916 10:57:03.201349 1434484 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:57:03.201396 1434484 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:57:03.201429 1434484 start.go:360] acquireMachinesLock for ha-334765: {Name:mk63c1424907d32e4e30c00d74a2bae6eec53e1d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:57:03.201519 1434484 start.go:364] duration metric: took 65.418µs to acquireMachinesLock for "ha-334765"
	I0916 10:57:03.201549 1434484 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:57:03.201560 1434484 fix.go:54] fixHost starting: 
	I0916 10:57:03.201871 1434484 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:57:03.218384 1434484 fix.go:112] recreateIfNeeded on ha-334765: state=Stopped err=<nil>
	W0916 10:57:03.218414 1434484 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:57:03.221468 1434484 out.go:177] * Restarting existing docker container for "ha-334765" ...
	I0916 10:57:03.224171 1434484 cli_runner.go:164] Run: docker start ha-334765
	I0916 10:57:03.527782 1434484 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:57:03.547511 1434484 kic.go:430] container "ha-334765" state is running.
	I0916 10:57:03.548077 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:57:03.570834 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:57:03.571073 1434484 machine.go:93] provisionDockerMachine start ...
	I0916 10:57:03.571136 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:03.594647 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:03.594905 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34643 <nil> <nil>}
	I0916 10:57:03.594914 1434484 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:57:03.598093 1434484 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:57:06.747978 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:57:06.748002 1434484 ubuntu.go:169] provisioning hostname "ha-334765"
	I0916 10:57:06.748105 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:06.765168 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:06.765420 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34643 <nil> <nil>}
	I0916 10:57:06.765439 1434484 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765 && echo "ha-334765" | sudo tee /etc/hostname
	I0916 10:57:06.912375 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 10:57:06.912511 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:06.930262 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:06.930513 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34643 <nil> <nil>}
	I0916 10:57:06.930536 1434484 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:57:07.068974 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:57:07.069041 1434484 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:57:07.069170 1434484 ubuntu.go:177] setting up certificates
	I0916 10:57:07.069202 1434484 provision.go:84] configureAuth start
	I0916 10:57:07.069272 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:57:07.085692 1434484 provision.go:143] copyHostCerts
	I0916 10:57:07.085736 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:57:07.085779 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:57:07.085791 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:57:07.085868 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:57:07.085956 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:57:07.085977 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:57:07.085982 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:57:07.086009 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:57:07.086056 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:57:07.086081 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:57:07.086086 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:57:07.086114 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:57:07.086171 1434484 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765 san=[127.0.0.1 192.168.49.2 ha-334765 localhost minikube]
	I0916 10:57:07.266735 1434484 provision.go:177] copyRemoteCerts
	I0916 10:57:07.266809 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:57:07.266853 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:07.284909 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:07.381598 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:57:07.381678 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:57:07.406815 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:57:07.406877 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:57:07.432479 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:57:07.432556 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1200 bytes)
	I0916 10:57:07.458054 1434484 provision.go:87] duration metric: took 388.82419ms to configureAuth
	I0916 10:57:07.458086 1434484 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:57:07.458339 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:07.458446 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:07.475724 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:07.475963 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34643 <nil> <nil>}
	I0916 10:57:07.475977 1434484 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:57:07.867552 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:57:07.867578 1434484 machine.go:96] duration metric: took 4.296495656s to provisionDockerMachine
	I0916 10:57:07.867590 1434484 start.go:293] postStartSetup for "ha-334765" (driver="docker")
	I0916 10:57:07.867601 1434484 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:57:07.867700 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:57:07.867764 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:07.892906 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:07.989847 1434484 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:57:07.993037 1434484 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:57:07.993075 1434484 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:57:07.993089 1434484 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:57:07.993096 1434484 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:57:07.993107 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:57:07.993169 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:57:07.993263 1434484 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:57:07.993275 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:57:07.993381 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:57:08.004834 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:57:08.031528 1434484 start.go:296] duration metric: took 163.922774ms for postStartSetup
	I0916 10:57:08.031616 1434484 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:57:08.031660 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:08.048960 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:08.142228 1434484 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:57:08.147069 1434484 fix.go:56] duration metric: took 4.945501551s for fixHost
	I0916 10:57:08.147097 1434484 start.go:83] releasing machines lock for "ha-334765", held for 4.94556285s
	I0916 10:57:08.147190 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:57:08.163255 1434484 ssh_runner.go:195] Run: cat /version.json
	I0916 10:57:08.163314 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:08.163587 1434484 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:57:08.163641 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:08.186505 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:08.190240 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:08.280191 1434484 ssh_runner.go:195] Run: systemctl --version
	I0916 10:57:08.417174 1434484 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:57:08.559248 1434484 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:57:08.563474 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:57:08.571978 1434484 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:57:08.572061 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:57:08.580931 1434484 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:57:08.580967 1434484 start.go:495] detecting cgroup driver to use...
	I0916 10:57:08.581000 1434484 detect.go:187] detected "cgroupfs" cgroup driver on host os
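	(The cgroup-driver detection above inspects the host rather than trusting defaults. A minimal way to approximate the check by hand; these are illustrative commands, not the exact probe detect.go runs:)
	# cgroup v2 mounts cgroup2fs at /sys/fs/cgroup; cgroup v1 mounts tmpfs there.
	stat -fc %T /sys/fs/cgroup/
	# Docker reports the cgroup driver it was configured with:
	docker info --format '{{.CgroupDriver}}'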
	I0916 10:57:08.581053 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:57:08.593396 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:57:08.604533 1434484 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:57:08.604767 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:57:08.617947 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:57:08.629993 1434484 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:57:08.723737 1434484 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:57:08.814395 1434484 docker.go:233] disabling docker service ...
	I0916 10:57:08.814460 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:57:08.826862 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:57:08.838374 1434484 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:57:08.926530 1434484 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:57:09.018806 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:57:09.037009 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:57:09.054957 1434484 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:57:09.055077 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.065239 1434484 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:57:09.065350 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.076234 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.087181 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.097700 1434484 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:57:09.107433 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.117629 1434484 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.127578 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:09.137656 1434484 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:57:09.146514 1434484 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:57:09.155277 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:09.248075 1434484 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:57:09.365686 1434484 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:57:09.365793 1434484 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:57:09.369797 1434484 start.go:563] Will wait 60s for crictl version
	I0916 10:57:09.369873 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:57:09.373304 1434484 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:57:09.415345 1434484 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:57:09.415437 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:57:09.462908 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:57:09.506285 1434484 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:57:09.508872 1434484 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:57:09.525589 1434484 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:57:09.529789 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:57:09.541719 1434484 kubeadm.go:883] updating cluster {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:57:09.541889 1434484 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:57:09.541949 1434484 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:57:09.597636 1434484 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:57:09.597662 1434484 crio.go:433] Images already preloaded, skipping extraction
	I0916 10:57:09.597721 1434484 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:57:09.635526 1434484 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 10:57:09.635551 1434484 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:57:09.635563 1434484 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 10:57:09.635679 1434484 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:57:09.635767 1434484 ssh_runner.go:195] Run: crio config
	I0916 10:57:09.690009 1434484 cni.go:84] Creating CNI manager for ""
	I0916 10:57:09.690036 1434484 cni.go:136] multinode detected (4 nodes found), recommending kindnet
	I0916 10:57:09.690045 1434484 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:57:09.690070 1434484 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-334765 NodeName:ha-334765 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:57:09.690219 1434484 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "ha-334765"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
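	(This generated config is copied to /var/tmp/minikube/kubeadm.yaml.new further down. If you had to drive kubeadm against such a file by hand, a hedged sketch would look like the following; minikube normally wraps this invocation itself:)
	# Hypothetical manual run using the kubeadm binary minikube caches on the node:
	sudo /var/lib/minikube/binaries/v1.31.1/kubeadm init \
	  --config /var/tmp/minikube/kubeadm.yaml \
	  --ignore-preflight-errors=all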
	
	I0916 10:57:09.690239 1434484 kube-vip.go:115] generating kube-vip config ...
	I0916 10:57:09.690292 1434484 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:57:09.702732 1434484 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:57:09.702861 1434484 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
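	(The manifest above is scp'd to /etc/kubernetes/manifests/kube-vip.yaml below, so the kubelet runs kube-vip as a static pod on each control-plane node and the elected leader serves the HA VIP. A hedged smoke test against the addresses in this config:)
	# 192.168.49.254:8443 is the APIServerHAVIP and port from the cluster config.
	ping -c1 192.168.49.254
	curl -k https://192.168.49.254:8443/version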
	I0916 10:57:09.702933 1434484 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:57:09.711669 1434484 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:57:09.711746 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:57:09.720374 1434484 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (359 bytes)
	I0916 10:57:09.738409 1434484 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:57:09.755604 1434484 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2147 bytes)
	I0916 10:57:09.773383 1434484 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:57:09.791564 1434484 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:57:09.795951 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:57:09.807678 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:09.896627 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:57:09.911057 1434484 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.2
	I0916 10:57:09.911086 1434484 certs.go:194] generating shared ca certs ...
	I0916 10:57:09.911102 1434484 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:09.911320 1434484 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:57:09.911394 1434484 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:57:09.911409 1434484 certs.go:256] generating profile certs ...
	I0916 10:57:09.911526 1434484 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:57:09.911571 1434484 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.fc02f7db
	I0916 10:57:09.911607 1434484 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.fc02f7db with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:57:10.641845 1434484 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.fc02f7db ...
	I0916 10:57:10.641880 1434484 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.fc02f7db: {Name:mk3e6b81db9cb48a7942bbdf567115acfb16c2db Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:10.642082 1434484 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.fc02f7db ...
	I0916 10:57:10.642097 1434484 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.fc02f7db: {Name:mk75cc8113dfbd6c255ee33a5aeb13588331d4a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:10.642188 1434484 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.fc02f7db -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 10:57:10.642341 1434484 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.fc02f7db -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
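	(The regenerated apiserver cert must carry every control-plane IP plus the VIP as SANs, i.e. the IP list passed at the crypto.go line above. One way to confirm with standard openssl; illustrative, not part of the test run:)
	openssl x509 -noout -text \
	  -in /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt \
	  | grep -A2 'Subject Alternative Name'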
	I0916 10:57:10.642486 1434484 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:57:10.642505 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:57:10.642522 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:57:10.642537 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:57:10.642551 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:57:10.642569 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:57:10.642588 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:57:10.642603 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:57:10.642617 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:57:10.642668 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:57:10.642708 1434484 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:57:10.642721 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:57:10.642749 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:57:10.642785 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:57:10.642828 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:57:10.642882 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:57:10.642931 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:57:10.642948 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:10.642962 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:57:10.643657 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:57:10.669506 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:57:10.695082 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:57:10.720394 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:57:10.745072 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:57:10.769576 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:57:10.796263 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:57:10.821328 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:57:10.846635 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:57:10.871296 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:57:10.896050 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:57:10.919556 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:57:10.938758 1434484 ssh_runner.go:195] Run: openssl version
	I0916 10:57:10.944770 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:57:10.954927 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:57:10.958989 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:57:10.959060 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:57:10.966823 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:57:10.977029 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:57:10.986865 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:10.990621 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:10.990687 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:10.997687 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:57:11.008284 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:57:11.018875 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:57:11.022715 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:57:11.022784 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:57:11.029806 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
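
The `openssl x509 -hash` / `ln -fs` pairs above implement OpenSSL's hashed trust directory: lookups in /etc/ssl/certs resolve a CA by a symlink named <subject-hash>.0, so installing each bundle means computing its subject hash and linking it under that name (e.g. b5213941.0 for minikubeCA above). A sketch of the same install step (hypothetical helper; writing into /etc/ssl/certs needs root):

    package casketch

    import (
        "os"
        "os/exec"
        "path/filepath"
        "strings"
    )

    // InstallCA computes the OpenSSL subject hash for pemPath and symlinks the
    // cert into the hashed trust directory, mirroring the commands in the log.
    func InstallCA(pemPath string) error {
        out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pemPath).Output()
        if err != nil {
            return err
        }
        link := filepath.Join("/etc/ssl/certs", strings.TrimSpace(string(out))+".0")
        _ = os.Remove(link) // ln -fs semantics: replace any stale link
        return os.Symlink(pemPath, link)
    }
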
	I0916 10:57:11.038695 1434484 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:57:11.042346 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:57:11.049136 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:57:11.056225 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:57:11.063544 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:57:11.071331 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:57:11.078705 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
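
Each `-checkend 86400` run above asks openssl whether the certificate will still be valid one day from now (non-zero exit if it expires within 86400 seconds); passing all six checks is how the restart path decides the existing control-plane certs can be reused rather than regenerated. The native equivalent is a one-line comparison against NotAfter:

    package certcheck

    import (
        "crypto/x509"
        "encoding/pem"
        "errors"
        "time"
    )

    // ExpiresWithin answers the same question as `openssl x509 -checkend`:
    // will this certificate have expired d from now?
    func ExpiresWithin(pemBytes []byte, d time.Duration) (bool, error) {
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            return false, errors.New("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            return false, err
        }
        return time.Now().Add(d).After(cert.NotAfter), nil
    }
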
	I0916 10:57:11.085936 1434484 kubeadm.go:392] StartCluster: {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:57:11.086088 1434484 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 10:57:11.086160 1434484 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:57:11.125268 1434484 cri.go:89] found id: ""
	I0916 10:57:11.125376 1434484 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:57:11.134615 1434484 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:57:11.134638 1434484 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:57:11.134724 1434484 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:57:11.143760 1434484 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:57:11.144185 1434484 kubeconfig.go:47] verify endpoint returned: get endpoint: "ha-334765" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:57:11.144296 1434484 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-1378450/kubeconfig needs updating (will repair): [kubeconfig missing "ha-334765" cluster setting kubeconfig missing "ha-334765" context setting]
	I0916 10:57:11.144579 1434484 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:11.145009 1434484 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:57:11.145266 1434484 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:57:11.145715 1434484 cert_rotation.go:140] Starting client certificate rotation controller
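
The client config dumped above is a client-go rest.Config built straight from the profile's files: Host is the apiserver endpoint, and TLSClientConfig carries the per-profile client cert/key for mutual TLS plus the cluster CA for server verification. Reduced to the fields that matter (a sketch with illustrative paths, not minikube's kapi.go):

    package kapisketch

    import (
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    // NewClient builds the same kind of client the kapi.go line prints:
    // client-cert auth against the apiserver, trust anchored on the cluster CA.
    func NewClient() (*kubernetes.Clientset, error) {
        cfg := &rest.Config{
            Host: "https://192.168.49.2:8443",
            TLSClientConfig: rest.TLSClientConfig{
                CertFile: "/path/to/profiles/ha-334765/client.crt", // illustrative paths
                KeyFile:  "/path/to/profiles/ha-334765/client.key",
                CAFile:   "/path/to/ca.crt",
            },
        }
        return kubernetes.NewForConfig(cfg)
    }
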
	I0916 10:57:11.145928 1434484 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:57:11.155795 1434484 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:57:11.155820 1434484 kubeadm.go:597] duration metric: took 21.17576ms to restartPrimaryControlPlane
	I0916 10:57:11.155829 1434484 kubeadm.go:394] duration metric: took 69.903431ms to StartCluster
	I0916 10:57:11.155859 1434484 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:11.155935 1434484 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:57:11.156592 1434484 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:11.156836 1434484 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:57:11.156860 1434484 start.go:241] waiting for startup goroutines ...
	I0916 10:57:11.156874 1434484 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:57:11.157163 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:11.161237 1434484 out.go:177] * Enabled addons: 
	I0916 10:57:11.163738 1434484 addons.go:510] duration metric: took 6.865878ms for enable addons: enabled=[]
	I0916 10:57:11.163778 1434484 start.go:246] waiting for cluster config update ...
	I0916 10:57:11.163788 1434484 start.go:255] writing updated cluster config ...
	I0916 10:57:11.166498 1434484 out.go:201] 
	I0916 10:57:11.169534 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:11.169662 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:57:11.172751 1434484 out.go:177] * Starting "ha-334765-m02" control-plane node in "ha-334765" cluster
	I0916 10:57:11.175771 1434484 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:57:11.178596 1434484 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:57:11.181296 1434484 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:57:11.181327 1434484 cache.go:56] Caching tarball of preloaded images
	I0916 10:57:11.181360 1434484 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:57:11.181633 1434484 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:57:11.181652 1434484 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:57:11.181819 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:57:11.204613 1434484 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:57:11.204634 1434484 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:57:11.204803 1434484 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:57:11.204840 1434484 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:57:11.204851 1434484 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:57:11.204860 1434484 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:57:11.204876 1434484 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:57:11.206281 1434484 image.go:273] response: 
	I0916 10:57:11.319235 1434484 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:57:11.319274 1434484 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:57:11.319309 1434484 start.go:360] acquireMachinesLock for ha-334765-m02: {Name:mkb176e2cfa3ae927444127935258ba37ca2bc0a Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:57:11.319378 1434484 start.go:364] duration metric: took 48.269µs to acquireMachinesLock for "ha-334765-m02"
	I0916 10:57:11.319406 1434484 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:57:11.319412 1434484 fix.go:54] fixHost starting: m02
	I0916 10:57:11.319714 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:57:11.336795 1434484 fix.go:112] recreateIfNeeded on ha-334765-m02: state=Stopped err=<nil>
	W0916 10:57:11.336827 1434484 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:57:11.340025 1434484 out.go:177] * Restarting existing docker container for "ha-334765-m02" ...
	I0916 10:57:11.342852 1434484 cli_runner.go:164] Run: docker start ha-334765-m02
	I0916 10:57:11.631331 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:57:11.649082 1434484 kic.go:430] container "ha-334765-m02" state is running.
	I0916 10:57:11.649565 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:57:11.679559 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:57:11.679802 1434484 machine.go:93] provisionDockerMachine start ...
	I0916 10:57:11.679879 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:11.699392 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:11.699629 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34648 <nil> <nil>}
	I0916 10:57:11.699638 1434484 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:57:11.700209 1434484 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:41876->127.0.0.1:34648: read: connection reset by peer
	I0916 10:57:14.885421 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:57:14.885497 1434484 ubuntu.go:169] provisioning hostname "ha-334765-m02"
	I0916 10:57:14.885598 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:14.931027 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:14.931312 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34648 <nil> <nil>}
	I0916 10:57:14.931325 1434484 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m02 && echo "ha-334765-m02" | sudo tee /etc/hostname
	I0916 10:57:15.158732 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 10:57:15.158894 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:15.187786 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:15.188042 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34648 <nil> <nil>}
	I0916 10:57:15.188059 1434484 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:57:15.374994 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:57:15.375076 1434484 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:57:15.375123 1434484 ubuntu.go:177] setting up certificates
	I0916 10:57:15.375170 1434484 provision.go:84] configureAuth start
	I0916 10:57:15.375270 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:57:15.416858 1434484 provision.go:143] copyHostCerts
	I0916 10:57:15.416909 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:57:15.416942 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:57:15.416954 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:57:15.417034 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:57:15.417126 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:57:15.417145 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:57:15.417155 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:57:15.417188 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:57:15.417240 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:57:15.417256 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:57:15.417265 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:57:15.417291 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:57:15.417353 1434484 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m02 san=[127.0.0.1 192.168.49.3 ha-334765-m02 localhost minikube]
	I0916 10:57:16.404043 1434484 provision.go:177] copyRemoteCerts
	I0916 10:57:16.404121 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:57:16.404172 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:16.428094 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34648 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:57:16.613701 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:57:16.613765 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:57:16.685409 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:57:16.685485 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:57:16.811292 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:57:16.811403 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:57:16.864869 1434484 provision.go:87] duration metric: took 1.489669302s to configureAuth
	I0916 10:57:16.864974 1434484 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:57:16.865276 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:16.865409 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:16.890609 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:16.890866 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34648 <nil> <nil>}
	I0916 10:57:16.890886 1434484 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:57:17.552481 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:57:17.552565 1434484 machine.go:96] duration metric: took 5.872746224s to provisionDockerMachine
	I0916 10:57:17.552598 1434484 start.go:293] postStartSetup for "ha-334765-m02" (driver="docker")
	I0916 10:57:17.552644 1434484 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:57:17.552836 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:57:17.552924 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:17.589135 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34648 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:57:17.875546 1434484 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:57:17.924987 1434484 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:57:17.925032 1434484 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:57:17.925045 1434484 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:57:17.925052 1434484 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:57:17.925067 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:57:17.925128 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:57:17.925208 1434484 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:57:17.925220 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:57:17.925322 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:57:17.973780 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:57:18.121293 1434484 start.go:296] duration metric: took 568.647035ms for postStartSetup
	I0916 10:57:18.121386 1434484 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:57:18.121447 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:18.151442 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34648 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:57:18.332654 1434484 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:57:18.360975 1434484 fix.go:56] duration metric: took 7.041555188s for fixHost
	I0916 10:57:18.360998 1434484 start.go:83] releasing machines lock for "ha-334765-m02", held for 7.041605886s
	I0916 10:57:18.361078 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 10:57:18.391177 1434484 out.go:177] * Found network options:
	I0916 10:57:18.393954 1434484 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:57:18.397427 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:18.397502 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:57:18.397601 1434484 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:57:18.397646 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:18.397904 1434484 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:57:18.397978 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 10:57:18.434469 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34648 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:57:18.451227 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34648 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 10:57:18.942105 1434484 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:57:18.970632 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:57:19.002439 1434484 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:57:19.002616 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:57:19.027352 1434484 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
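
The find/-exec mv pass above is how minikube neutralizes CNI configs that would conflict with the cluster's chosen CNI: instead of deleting them it renames each to *.mk_disabled, which is reversible and is what the "loopback cni configuration disabled" line reports. Equivalent logic as a sketch (assumed helper name):

    package cnisketch

    import (
        "os"
        "path/filepath"
        "strings"
    )

    // DisableCNIConfigs parks every config matching glob by renaming it to
    // *.mk_disabled so it cannot shadow the cluster's chosen CNI.
    func DisableCNIConfigs(glob string) error {
        matches, err := filepath.Glob(glob) // e.g. "/etc/cni/net.d/*loopback.conf*"
        if err != nil {
            return err
        }
        for _, m := range matches {
            if strings.HasSuffix(m, ".mk_disabled") {
                continue // already parked
            }
            if err := os.Rename(m, m+".mk_disabled"); err != nil {
                return err
            }
        }
        return nil
    }
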
	I0916 10:57:19.027427 1434484 start.go:495] detecting cgroup driver to use...
	I0916 10:57:19.027475 1434484 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:57:19.027559 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:57:19.061924 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:57:19.090997 1434484 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:57:19.091165 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:57:19.123258 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:57:19.157650 1434484 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:57:19.506124 1434484 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:57:19.852539 1434484 docker.go:233] disabling docker service ...
	I0916 10:57:19.852665 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:57:19.890880 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:57:19.942971 1434484 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:57:20.272078 1434484 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:57:20.561868 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
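
The docker/cri-docker shutdown above follows a deliberate order: stop the .socket before the .service so socket activation cannot respawn the daemon, then disable and mask the units so nothing re-enables them; the final `is-active` probe confirms the runtime really is down before CRI-O is configured. Condensed into one loop (a sketch of the same ordering):

    package runtimesketch

    import (
        "fmt"
        "os/exec"
    )

    // DisableRuntime replays the sequence from the log for a unit such as
    // "docker" or "cri-docker": stop socket, stop service, disable, mask.
    func DisableRuntime(name string) error {
        for _, args := range [][]string{
            {"systemctl", "stop", "-f", name + ".socket"},
            {"systemctl", "stop", "-f", name + ".service"},
            {"systemctl", "disable", name + ".socket"},
            {"systemctl", "mask", name + ".service"},
        } {
            if out, err := exec.Command("sudo", args...).CombinedOutput(); err != nil {
                return fmt.Errorf("%v failed: %v: %s", args, err, out)
            }
        }
        return nil
    }
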
	I0916 10:57:20.610056 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:57:20.666744 1434484 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:57:20.666866 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.699676 1434484 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:57:20.699790 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.728758 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.751260 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.779627 1434484 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:57:20.814485 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.847824 1434484 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.879855 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:57:20.911684 1434484 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:57:20.943537 1434484 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
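
Taken together, the sed edits above leave /etc/crio/crio.conf.d/02-crio.conf with roughly the following overrides (reconstructed from the commands themselves, not copied from the node): the kubeadm-matching pause image, the cgroupfs manager detected on the host, conmon parked in the pod cgroup, and unprivileged low ports opened so pods can bind 80/443:

    pause_image = "registry.k8s.io/pause:3.10"
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
      "net.ipv4.ip_unprivileged_port_start=0",
    ]
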
	I0916 10:57:20.970849 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:21.228764 1434484 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:57:22.590937 1434484 ssh_runner.go:235] Completed: sudo systemctl restart crio: (1.362089752s)
	I0916 10:57:22.591014 1434484 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:57:22.591098 1434484 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:57:22.603310 1434484 start.go:563] Will wait 60s for crictl version
	I0916 10:57:22.603428 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:57:22.611418 1434484 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:57:22.687433 1434484 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:57:22.687537 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:57:22.816059 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:57:22.942958 1434484 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:57:22.946294 1434484 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:57:22.949200 1434484 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:57:22.974455 1434484 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:57:22.977989 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:57:22.994287 1434484 mustload.go:65] Loading cluster: ha-334765
	I0916 10:57:22.994527 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:22.994787 1434484 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:57:23.038203 1434484 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:57:23.038496 1434484 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.3
	I0916 10:57:23.038511 1434484 certs.go:194] generating shared ca certs ...
	I0916 10:57:23.038526 1434484 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:23.038641 1434484 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:57:23.038687 1434484 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:57:23.038698 1434484 certs.go:256] generating profile certs ...
	I0916 10:57:23.038780 1434484 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:57:23.038851 1434484 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.b7ea8422
	I0916 10:57:23.038896 1434484 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:57:23.038909 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:57:23.038922 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:57:23.038939 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:57:23.038949 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:57:23.038963 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:57:23.038976 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:57:23.038992 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:57:23.039003 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:57:23.039060 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:57:23.039093 1434484 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:57:23.039105 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:57:23.039129 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:57:23.039166 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:57:23.039191 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:57:23.039239 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:57:23.039271 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:57:23.039289 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:23.039305 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:57:23.039365 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:57:23.071749 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:57:23.184959 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:57:23.201995 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:57:23.227121 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:57:23.241364 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:57:23.266592 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:57:23.283335 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:57:23.307281 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:57:23.315450 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:57:23.338590 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:57:23.349150 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:57:23.365227 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:57:23.368956 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:57:23.382665 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:57:23.409873 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:57:23.457792 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:57:23.497976 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:57:23.533988 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:57:23.569999 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:57:23.604544 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:57:23.648305 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:57:23.690905 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:57:23.730691 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:57:23.776015 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:57:23.815600 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:57:23.847318 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:57:23.884010 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:57:23.904028 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:57:23.924103 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:57:23.943001 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 10:57:23.962201 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
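
The stat/scp pairs above stage the secrets every control plane must share through memory: the service-account keypair (sa.pub/sa.key), the front-proxy CA, and the etcd CA are read off the primary node and written to m02 at the same paths, so tokens and client certs issued by any apiserver verify on all of them. The pattern reduced to a sketch (function names and signatures are assumptions, not minikube's API):

    package hasketch

    // Shared control-plane material staged through memory in the log above.
    var sharedControlPlaneFiles = []string{
        "/var/lib/minikube/certs/sa.pub",
        "/var/lib/minikube/certs/sa.key",
        "/var/lib/minikube/certs/front-proxy-ca.crt",
        "/var/lib/minikube/certs/front-proxy-ca.key",
        "/var/lib/minikube/certs/etcd/ca.crt",
        "/var/lib/minikube/certs/etcd/ca.key",
    }

    // SyncControlPlaneSecrets copies each file from the primary to the joining
    // node via an in-memory buffer, mirroring the "scp <path> --> memory" /
    // "scp memory --> <path>" pairs; read and write stand in for the two SSH
    // sessions.
    func SyncControlPlaneSecrets(read func(string) ([]byte, error), write func(string, []byte) error) error {
        for _, p := range sharedControlPlaneFiles {
            data, err := read(p)
            if err != nil {
                return err
            }
            if err := write(p, data); err != nil {
                return err
            }
        }
        return nil
    }
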
	I0916 10:57:23.980511 1434484 ssh_runner.go:195] Run: openssl version
	I0916 10:57:23.988019 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:57:23.997762 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:57:24.001374 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:57:24.001500 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:57:24.014389 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:57:24.024573 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:57:24.035103 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:24.038992 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:24.039067 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:24.046666 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:57:24.056635 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:57:24.067022 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:57:24.071087 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:57:24.071190 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:57:24.078387 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:57:24.088520 1434484 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:57:24.094437 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:57:24.101795 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:57:24.109220 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:57:24.116648 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:57:24.124048 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:57:24.131479 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:57:24.138752 1434484 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 crio true true} ...
	I0916 10:57:24.138910 1434484 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:57:24.138967 1434484 kube-vip.go:115] generating kube-vip config ...
	I0916 10:57:24.139037 1434484 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:57:24.151998 1434484 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:57:24.152070 1434484 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
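The manifest above is rendered by kube-vip.go from a template, with the VIP (192.168.49.254), port (8443), and interface (eth0) filled in per cluster; the lb_enable/lb_port pair is present only because the ip_vs probe a few lines earlier succeeded and control-plane load-balancing was auto-enabled. A trimmed sketch of that render step; the template below is a hypothetical stand-in, not minikube's actual one:

    package main

    import (
        "os"
        "text/template"
    )

    // Hypothetical, heavily trimmed stand-in for the kube-vip manifest template.
    const manifestTmpl = `apiVersion: v1
    kind: Pod
    metadata:
      name: kube-vip
      namespace: kube-system
    spec:
      containers:
      - args: [manager]
        env:
        - name: address
          value: {{ .VIP }}
        - name: port
          value: "{{ .Port }}"
        - name: vip_interface
          value: {{ .Interface }}
        image: ghcr.io/kube-vip/kube-vip:v0.8.0
        name: kube-vip
      hostNetwork: true
    `

    func main() {
        t := template.Must(template.New("kubevip").Parse(manifestTmpl))
        // Values taken from the log above.
        if err := t.Execute(os.Stdout, struct {
            VIP, Interface string
            Port           int
        }{VIP: "192.168.49.254", Interface: "eth0", Port: 8443}); err != nil {
            panic(err)
        }
    }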
	I0916 10:57:24.152145 1434484 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:57:24.161360 1434484 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:57:24.161468 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:57:24.170650 1434484 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:57:24.189489 1434484 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:57:24.210356 1434484 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:57:24.233224 1434484 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:57:24.236646 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
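The one-liner above is a safe-rewrite idiom for /etc/hosts: grep -v drops any stale control-plane.minikube.internal entry, the fresh VIP mapping is appended, and the result is staged in /tmp/h.$$ before a single sudo cp replaces the live file. The same filter-and-append in Go, as a sketch that prints the result and skips the temp-file step the shell version uses:

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    func main() {
        const host = "control-plane.minikube.internal"
        data, err := os.ReadFile("/etc/hosts")
        if err != nil {
            panic(err)
        }
        var kept []string
        for _, line := range strings.Split(string(data), "\n") {
            // Drop any stale mapping for the control-plane alias.
            if strings.HasSuffix(line, "\t"+host) {
                continue
            }
            kept = append(kept, line)
        }
        kept = append(kept, fmt.Sprintf("192.168.49.254\t%s", host))
        // Real code would stage this in a temp file and copy it over
        // /etc/hosts in one step, as the shell one-liner does.
        fmt.Println(strings.Join(kept, "\n"))
    }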
	I0916 10:57:24.248284 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:24.367693 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:57:24.380801 1434484 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:57:24.381211 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:57:24.385816 1434484 out.go:177] * Verifying Kubernetes components...
	I0916 10:57:24.388233 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:24.495109 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:57:24.510258 1434484 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:57:24.510524 1434484 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:57:24.510588 1434484 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
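The warning above is deliberate: the kubeconfig points at the HA virtual IP (192.168.49.254), but while m02 is still joining the VIP may not answer, so the client is repointed at the primary control plane's direct address before polling. A function-level sketch of that override using client-go (the function name clientViaDirectHost is illustrative):

    import (
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // clientViaDirectHost loads a kubeconfig but swaps the possibly stale
    // VIP endpoint for a known-good apiserver address, as kubeadm.go:483 logs.
    func clientViaDirectHost(kubeconfig string) (*kubernetes.Clientset, error) {
        cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
        if err != nil {
            return nil, err
        }
        // Stale VIP 192.168.49.254 -> direct apiserver 192.168.49.2.
        cfg.Host = "https://192.168.49.2:8443"
        return kubernetes.NewForConfig(cfg)
    }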
	I0916 10:57:24.510797 1434484 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m02" to be "Ready" ...
	I0916 10:57:24.510880 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:24.510891 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:24.510900 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:24.510905 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.298126 1434484 round_trippers.go:574] Response Status: 500 Internal Server Error in 10787 milliseconds
	I0916 10:57:35.298343 1434484 node_ready.go:53] error getting node "ha-334765-m02": etcdserver: request timed out
	I0916 10:57:35.298417 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:35.298428 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:35.298437 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.298442 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:44.181586 1434484 round_trippers.go:574] Response Status: 500 Internal Server Error in 8883 milliseconds
	I0916 10:57:44.182172 1434484 node_ready.go:53] error getting node "ha-334765-m02": etcdserver: leader changed
	I0916 10:57:44.182236 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:44.182242 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:44.182250 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:44.182255 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:44.220311 1434484 round_trippers.go:574] Response Status: 200 OK in 38 milliseconds
	I0916 10:57:44.221907 1434484 node_ready.go:49] node "ha-334765-m02" has status "Ready":"True"
	I0916 10:57:44.221932 1434484 node_ready.go:38] duration metric: took 19.711114913s for node "ha-334765-m02" to be "Ready" ...
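Note how the wait absorbed two 500s, "etcdserver: request timed out" and then "leader changed", both expected while the new etcd member joins and a leader election settles, before the 200 arrived 19.7s in. A function-level sketch of that tolerate-and-retry loop with client-go (waitNodeReady is an illustrative name; clientset construction is omitted):

    import (
        "context"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
    )

    // waitNodeReady retries a node Get until it reports Ready, swallowing
    // transient apiserver/etcd errors the way node_ready.go does.
    func waitNodeReady(ctx context.Context, c kubernetes.Interface, name string) error {
        return wait.PollUntilContextTimeout(ctx, time.Second, 6*time.Minute, true,
            func(ctx context.Context) (bool, error) {
                node, err := c.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
                if err != nil {
                    // e.g. "etcdserver: request timed out" / "leader changed":
                    // keep polling instead of failing the wait.
                    return false, nil
                }
                for _, cond := range node.Status.Conditions {
                    if cond.Type == corev1.NodeReady {
                        return cond.Status == corev1.ConditionTrue, nil
                    }
                }
                return false, nil
            })
    }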
	I0916 10:57:44.221943 1434484 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:57:44.221990 1434484 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:57:44.222007 1434484 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:57:44.222073 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:44.222077 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:44.222086 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:44.222090 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:44.225595 1434484 round_trippers.go:574] Response Status: 429 Too Many Requests in 3 milliseconds
	I0916 10:57:45.225890 1434484 with_retry.go:234] Got a Retry-After 1s response for attempt 1 to https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:45.225949 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:45.225964 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.225974 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.225979 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.234890 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
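The 429 above was handled transparently: client-go's with_retry.go read the server's Retry-After of 1s, slept, and reissued the request, which then returned 200. A standalone sketch of the same behavior for a plain HTTP GET (getWithRetry and the URL in main are illustrative):

    package main

    import (
        "fmt"
        "net/http"
        "strconv"
        "time"
    )

    // getWithRetry retries a GET when the server answers 429, sleeping for
    // the Retry-After interval it advertises.
    func getWithRetry(url string, maxAttempts int) (*http.Response, error) {
        for attempt := 1; ; attempt++ {
            resp, err := http.Get(url)
            if err != nil {
                return nil, err
            }
            if resp.StatusCode != http.StatusTooManyRequests || attempt >= maxAttempts {
                return resp, nil
            }
            // Default to the 1s seen in the log if Retry-After is absent.
            delay := time.Second
            if s := resp.Header.Get("Retry-After"); s != "" {
                if secs, err := strconv.Atoi(s); err == nil {
                    delay = time.Duration(secs) * time.Second
                }
            }
            resp.Body.Close()
            fmt.Printf("got 429 on attempt %d, retrying after %s\n", attempt, delay)
            time.Sleep(delay)
        }
    }

    func main() {
        resp, err := getWithRetry("https://example.com/", 3)
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        fmt.Println("final status:", resp.Status)
    }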
	I0916 10:57:45.255266 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.255771 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:57:45.255817 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.255844 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.255867 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.260378 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:45.261150 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:45.261164 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.261173 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.261177 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.264753 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:45.265998 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.266030 1434484 pod_ready.go:82] duration metric: took 10.41161ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.266042 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.267559 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:57:45.267584 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.267594 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.267601 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.276215 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:57:45.277143 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:45.277206 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.277229 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.277249 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.283947 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:45.285013 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.285128 1434484 pod_ready.go:82] duration metric: took 19.028565ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.285194 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.285330 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:57:45.285365 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.285389 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.285416 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.290225 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:45.291250 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:45.291317 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.291343 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.291360 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.307504 1434484 round_trippers.go:574] Response Status: 200 OK in 16 milliseconds
	I0916 10:57:45.308943 1434484 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.308969 1434484 pod_ready.go:82] duration metric: took 23.747971ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.308981 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.309054 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:57:45.309066 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.309074 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.309080 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.315669 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:45.316811 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:45.316833 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.316843 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.316850 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.324345 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:57:45.333824 1434484 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.333850 1434484 pod_ready.go:82] duration metric: took 24.858522ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.333861 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.333960 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:57:45.333972 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.333981 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.333985 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.355199 1434484 round_trippers.go:574] Response Status: 200 OK in 21 milliseconds
	I0916 10:57:45.426512 1434484 request.go:632] Waited for 70.208597ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:45.426578 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:45.426586 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.426602 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.426609 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.435631 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:57:45.436591 1434484 pod_ready.go:93] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.436615 1434484 pod_ready.go:82] duration metric: took 102.745737ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
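The "Waited ... due to client-side throttling" lines come from client-go's own token-bucket limiter, not server-side priority and fairness: the default rest.Config QPS of 5 with burst 10 means that once the burst is spent, each request queues for roughly 200ms, which matches the 70-200ms waits logged here. A small demonstration with the same flowcontrol package:

    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/util/flowcontrol"
    )

    func main() {
        // client-go's default client-side limiter: QPS 5, burst 10. After the
        // burst is consumed, Accept blocks ~200ms per request, producing the
        // "Waited for ... due to client-side throttling" log lines.
        limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)
        start := time.Now()
        for i := 0; i < 15; i++ {
            limiter.Accept() // blocks until a token is available
        }
        fmt.Printf("15 requests admitted in %s\n", time.Since(start))
    }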
	I0916 10:57:45.436642 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.626008 1434484 request.go:632] Waited for 189.249856ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:57:45.626069 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:57:45.626075 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.626084 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.626092 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.629044 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:45.826926 1434484 request.go:632] Waited for 197.102565ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:45.826983 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:45.826992 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:45.827007 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:45.827013 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:45.832199 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:45.832821 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:45.832842 1434484 pod_ready.go:82] duration metric: took 396.187725ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:45.832855 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:46.026709 1434484 request.go:632] Waited for 193.762218ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:57:46.026773 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:57:46.026782 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:46.026791 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:46.026801 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:46.033115 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:46.226604 1434484 request.go:632] Waited for 192.314558ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:46.226668 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:46.226681 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:46.226690 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:46.226700 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:46.229280 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:46.230249 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:46.230274 1434484 pod_ready.go:82] duration metric: took 397.411268ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:46.230290 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:46.426797 1434484 request.go:632] Waited for 196.422179ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:57:46.426885 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:57:46.426900 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:46.426909 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:46.426917 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:46.433198 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:46.626789 1434484 request.go:632] Waited for 192.289025ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:46.626926 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:46.626939 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:46.626948 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:46.626958 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:46.630940 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:46.632567 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:46.632593 1434484 pod_ready.go:82] duration metric: took 402.295075ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:46.632611 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:46.826262 1434484 request.go:632] Waited for 193.563914ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:57:46.826471 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:57:46.826554 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:46.826591 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:46.826612 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:46.829705 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:47.026105 1434484 request.go:632] Waited for 195.183385ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:47.026212 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:57:47.026274 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:47.026302 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:47.026319 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:47.029431 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:47.030593 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:47.030654 1434484 pod_ready.go:82] duration metric: took 398.034546ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:47.030681 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:47.226589 1434484 request.go:632] Waited for 195.808633ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:57:47.226708 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:57:47.226748 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:47.226779 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:47.226800 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:47.230041 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:47.426171 1434484 request.go:632] Waited for 195.186699ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:47.426283 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:47.426350 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:47.426378 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:47.426399 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:47.429289 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:47.429924 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:47.429977 1434484 pod_ready.go:82] duration metric: took 399.27399ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:47.430004 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:47.625928 1434484 request.go:632] Waited for 195.800361ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:57:47.625999 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:57:47.626011 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:47.626019 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:47.626029 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:47.630201 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:47.826358 1434484 request.go:632] Waited for 195.310093ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:47.826468 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:47.826531 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:47.826557 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:47.826573 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:47.831873 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:47.832810 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:47.832874 1434484 pod_ready.go:82] duration metric: took 402.848841ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:47.832916 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:48.026474 1434484 request.go:632] Waited for 193.467235ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:57:48.026659 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:57:48.026669 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:48.026679 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:48.026684 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:48.035918 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:57:48.226162 1434484 request.go:632] Waited for 188.29505ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:48.226334 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:57:48.226376 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:48.226410 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:48.226430 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:48.229536 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:48.230238 1434484 pod_ready.go:93] pod "kube-proxy-4vsvh" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:48.230291 1434484 pod_ready.go:82] duration metric: took 397.349805ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:48.230318 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:48.426139 1434484 request.go:632] Waited for 195.715761ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:57:48.426197 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:57:48.426203 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:48.426211 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:48.426214 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:48.430217 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:48.626749 1434484 request.go:632] Waited for 195.332271ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:57:48.626813 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:57:48.626825 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:48.626834 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:48.626839 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:48.632530 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:48.633059 1434484 pod_ready.go:93] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:48.633072 1434484 pod_ready.go:82] duration metric: took 402.732882ms for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:48.633083 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:48.826572 1434484 request.go:632] Waited for 193.424004ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:48.826686 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:48.826722 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:48.826748 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:48.826768 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:48.830210 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:49.026219 1434484 request.go:632] Waited for 195.269257ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.026405 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.026430 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:49.026464 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:49.026486 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:49.030327 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:49.226544 1434484 request.go:632] Waited for 93.220816ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:49.226620 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:49.226631 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:49.226640 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:49.226653 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:49.229895 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:49.425903 1434484 request.go:632] Waited for 195.289934ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.426032 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.426044 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:49.426053 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:49.426066 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:49.429088 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:49.633678 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:49.633705 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:49.633715 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:49.633721 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:49.636693 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:49.826919 1434484 request.go:632] Waited for 189.285991ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.826979 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:49.826988 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:49.826997 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:49.827005 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:49.829885 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:50.133399 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:50.133430 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:50.133443 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:50.133449 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:50.136522 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:50.226643 1434484 request.go:632] Waited for 89.243111ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:50.226708 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:50.226720 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:50.226741 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:50.226747 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:50.230202 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:50.633337 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:50.633363 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:50.633373 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:50.633381 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:50.636198 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:50.636899 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:50.636918 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:50.636926 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:50.636932 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:50.639706 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:50.640392 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
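From here the loop settles into a roughly 500ms cadence, re-fetching the pod and its node until kube-proxy-l998t leaves "Ready":"False". The predicate being re-evaluated is just the pod's PodReady condition; a minimal sketch (isPodReady is an illustrative name):

    import corev1 "k8s.io/api/core/v1"

    // A pod counts as "Ready" when its PodReady condition reports True;
    // pod_ready.go re-checks this on every poll until it flips or the
    // wait times out.
    func isPodReady(pod *corev1.Pod) bool {
        for _, c := range pod.Status.Conditions {
            if c.Type == corev1.PodReady {
                return c.Status == corev1.ConditionTrue
            }
        }
        return false
    }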
	I0916 10:57:51.133328 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:51.133352 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:51.133366 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:51.133372 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:51.136789 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:51.137931 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:51.137955 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:51.137971 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:51.137976 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:51.144382 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:51.633332 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:51.633353 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:51.633363 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:51.633369 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:51.636944 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:51.639435 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:51.639464 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:51.639474 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:51.639480 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:51.642234 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:52.134234 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:52.134258 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:52.134268 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:52.134273 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:52.137592 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:52.138871 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:52.138891 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:52.138908 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:52.138916 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:52.142782 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:52.633953 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:52.633988 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:52.633998 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:52.634003 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:52.643551 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:57:52.646134 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:52.646157 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:52.646183 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:52.646201 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:52.659518 1434484 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:57:52.661347 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:53.134018 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:53.134053 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:53.134067 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:53.134071 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:53.136961 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:53.137662 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:53.137680 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:53.137690 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:53.137697 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:53.140121 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:53.633958 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:53.633984 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:53.633993 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:53.634003 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:53.636891 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:53.638071 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:53.638119 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:53.638130 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:53.638138 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:53.641359 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:54.133352 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:54.133378 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:54.133394 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:54.133400 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:54.136491 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:54.137396 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:54.137414 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:54.137423 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:54.137427 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:54.139940 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:54.634261 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:54.634284 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:54.634294 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:54.634299 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:54.637260 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:54.637954 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:54.637979 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:54.637988 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:54.637992 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:54.640787 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:55.134223 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:55.134250 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:55.134261 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:55.134267 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:55.137354 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:55.138212 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:55.138232 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:55.138242 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:55.138248 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:55.142776 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:55.143564 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:55.633981 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:55.634017 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:55.634027 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:55.634033 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:55.637279 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:55.638069 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:55.638090 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:55.638097 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:55.638103 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:55.640818 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:56.133938 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:56.133965 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:56.133978 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:56.133983 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:56.138326 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:56.139188 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:56.139203 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:56.139324 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:56.139339 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:56.149577 1434484 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:57:56.633993 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:56.634015 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:56.634025 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:56.634034 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:56.641466 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:57:56.643668 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:56.643686 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:56.643695 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:56.643699 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:56.654940 1434484 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:57:57.133907 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:57.133927 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:57.133937 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:57.133943 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:57.137636 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:57.138364 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:57.138381 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:57.138391 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:57.138397 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:57.153987 1434484 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:57:57.154631 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:57.633945 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:57.633973 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:57.633982 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:57.633986 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:57.639120 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:57.640664 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:57.640700 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:57.640710 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:57.640715 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:57.650535 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:57:58.133733 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:58.133756 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:58.133765 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:58.133770 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:58.137868 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:58.138669 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:58.138683 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:58.138692 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:58.138697 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:58.142943 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:58.633883 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:58.633907 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:58.633917 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:58.633923 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:58.636850 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:58.637919 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:58.637942 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:58.637951 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:58.637955 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:58.640522 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:59.133931 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:59.133958 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:59.133969 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:59.133973 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:59.137273 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:59.138275 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:59.138298 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:59.138322 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:59.138327 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:59.141768 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:59.633912 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:57:59.633937 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:59.633947 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:59.633950 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:59.636954 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:59.637627 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:57:59.637650 1434484 round_trippers.go:469] Request Headers:
	I0916 10:57:59.637660 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:59.637666 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:59.640164 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:59.640706 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:00.133975 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:00.134015 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:00.134025 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:00.134030 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:00.287742 1434484 round_trippers.go:574] Response Status: 200 OK in 153 milliseconds
	I0916 10:58:00.292221 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:00.292238 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:00.292254 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:00.292259 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:00.332574 1434484 round_trippers.go:574] Response Status: 200 OK in 40 milliseconds
	I0916 10:58:00.633297 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:00.633318 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:00.633327 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:00.633333 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:00.739745 1434484 round_trippers.go:574] Response Status: 200 OK in 106 milliseconds
	I0916 10:58:00.756906 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:00.756924 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:00.756934 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:00.756941 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:00.787571 1434484 round_trippers.go:574] Response Status: 200 OK in 30 milliseconds
	I0916 10:58:01.134381 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:01.134463 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:01.134476 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:01.134482 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:01.138012 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:01.138834 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:01.138890 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:01.138915 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:01.138937 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:01.142342 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:01.634029 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:01.634095 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:01.634119 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:01.634124 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:01.637907 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:01.638714 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:01.638742 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:01.638752 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:01.638756 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:01.641938 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:01.642542 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:02.133240 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:02.133262 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:02.133271 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:02.133276 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:02.136160 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:02.136866 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:02.136884 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:02.136893 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:02.136897 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:02.139626 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:02.633894 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:02.633916 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:02.633926 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:02.633939 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:02.636999 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:02.637848 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:02.637867 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:02.637876 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:02.637881 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:02.640742 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:03.133639 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:03.133665 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:03.133675 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:03.133680 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:03.136570 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:03.137598 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:03.137618 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:03.137628 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:03.137632 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:03.145478 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:03.633924 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:03.633958 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:03.633968 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:03.633973 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:03.636825 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:03.637681 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:03.637703 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:03.637713 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:03.637721 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:03.640762 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:04.133393 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:04.133421 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:04.133432 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:04.133437 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:04.136353 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:04.137172 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:04.137192 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:04.137201 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:04.137206 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:04.139913 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:04.140471 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:04.633287 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:04.633311 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:04.633321 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:04.633325 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:04.636421 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:04.637642 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:04.637659 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:04.637669 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:04.637672 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:04.642043 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:05.133810 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:05.133836 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:05.133846 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:05.133852 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:05.137098 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:05.138251 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:05.138279 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:05.138289 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:05.138296 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:05.142555 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:05.633924 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:05.633947 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:05.633956 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:05.633960 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:05.638399 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:05.639138 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:05.639162 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:05.639171 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:05.639175 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:05.642561 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:06.133403 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:06.133429 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:06.133438 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:06.133445 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:06.136539 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:06.137588 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:06.137613 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:06.137624 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:06.137630 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:06.140556 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:06.141165 1434484 pod_ready.go:103] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:06.633914 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:06.633939 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:06.633948 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:06.633952 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:06.637355 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:06.638324 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:06.638347 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:06.638358 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:06.638378 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:06.642693 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:07.133315 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:58:07.133340 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.133349 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.133355 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.136361 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.137624 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:07.137702 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.137731 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.137755 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.146597 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:58:07.147265 1434484 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:07.147326 1434484 pod_ready.go:82] duration metric: took 18.514234139s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.147353 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.147446 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:58:07.147474 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.147497 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.147516 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.155324 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:07.156081 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:58:07.156103 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.156114 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.156118 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.158953 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.159561 1434484 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:07.159583 1434484 pod_ready.go:82] duration metric: took 12.210564ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.159595 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.159666 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:58:07.159676 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.159685 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.159690 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.162420 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.163069 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:58:07.163087 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.163096 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.163102 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.166113 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.166742 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:07.166764 1434484 pod_ready.go:82] duration metric: took 7.16096ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.166775 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.166845 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:58:07.166855 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.166864 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.166870 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.169681 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.170346 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:07.170365 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.170374 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.170378 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.173019 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.173653 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:07.173676 1434484 pod_ready.go:82] duration metric: took 6.892281ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.173688 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.173756 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:58:07.173766 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.173775 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.173784 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.176404 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.177028 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:07.177048 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:07.177057 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:07.177061 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:07.179655 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:07.180454 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:07.180480 1434484 pod_ready.go:82] duration metric: took 6.783337ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:07.180496 1434484 pod_ready.go:39] duration metric: took 22.958540164s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
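	(The 22.9s wait above is the pod_ready loop: it alternates a GET on the pod with a GET on the pod's node roughly every 500ms until the pod's Ready condition reports True. Below is a minimal sketch of that polling pattern using client-go; this is an illustration under stated assumptions, not minikube's actual pod_ready helper, and the namespace/pod name are copied from the log purely as example values.)

	// Hedged sketch: poll a pod's Ready condition at ~500ms intervals with client-go.
	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// waitPodReady polls GET /api/v1/namespaces/{ns}/pods/{name} until the
	// PodReady condition is True, mirroring the request cadence in the log.
	func waitPodReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string, timeout time.Duration) error {
		return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
			func(ctx context.Context) (bool, error) {
				pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil // treat transient API errors as "not ready yet"
				}
				for _, c := range pod.Status.Conditions {
					if c.Type == corev1.PodReady {
						return c.Status == corev1.ConditionTrue, nil
					}
				}
				return false, nil
			})
	}

	func main() {
		// Assumption: a standard kubeconfig at the default location.
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		cs := kubernetes.NewForConfigOrDie(cfg)
		if err := waitPodReady(context.Background(), cs, "kube-system", "kube-proxy-l998t", 6*time.Minute); err != nil {
			panic(err)
		}
		fmt.Println("pod is Ready")
	}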
	I0916 10:58:07.180512 1434484 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:58:07.180593 1434484 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:58:07.193820 1434484 api_server.go:72] duration metric: took 42.81296865s to wait for apiserver process to appear ...
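	(The process check above is a single `sudo pgrep -xnf kube-apiserver.*minikube.*` executed on the node through ssh_runner. A rough local equivalent, assuming pgrep is available on PATH, could retry the same pattern until it matches; this is a hypothetical helper for illustration, not minikube code.)

	// Hedged sketch: wait for a process whose full command line matches a pattern.
	// pgrep -f matches against the full command line, -x requires an exact match,
	// -n reports only the newest match; pgrep exits 0 when something matched.
	package main

	import (
		"fmt"
		"os/exec"
		"time"
	)

	func waitForProcess(pattern string, timeout time.Duration) error {
		deadline := time.Now().Add(timeout)
		for time.Now().Before(deadline) {
			if err := exec.Command("pgrep", "-xnf", pattern).Run(); err == nil {
				return nil // zero exit status: a matching process exists
			}
			time.Sleep(500 * time.Millisecond)
		}
		return fmt.Errorf("process matching %q did not appear within %s", pattern, timeout)
	}

	func main() {
		if err := waitForProcess("kube-apiserver.*minikube.*", 2*time.Minute); err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println("kube-apiserver process is up")
	}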
	I0916 10:58:07.193846 1434484 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:58:07.193867 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:07.201960 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:07.202002 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... 32-line healthz output identical to the 500 response logged immediately above, elided ...]
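	(Each /healthz response above lists one line per registered check, `[+]` for passing and `[-]` for failing; a single failing check, here poststarthook/start-service-ip-repair-controllers, keeps the endpoint returning 500 until it clears. A small standalone probe in that spirit follows; it assumes a local test cluster with a self-signed certificate and is an illustration, not minikube's api_server.go.)

	// Hedged sketch: GET the apiserver's /healthz endpoint, treat any status
	// other than 200 as unhealthy, and surface the "[-]" lines from the body.
	package main

	import (
		"crypto/tls"
		"fmt"
		"io"
		"net/http"
		"strings"
		"time"
	)

	func checkHealthz(url string) (healthy bool, failing []string, err error) {
		client := &http.Client{
			Timeout: 5 * time.Second,
			Transport: &http.Transport{
				// Assumption: self-signed certs on a throwaway test cluster.
				// Never skip verification against a production apiserver.
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			},
		}
		resp, err := client.Get(url)
		if err != nil {
			return false, nil, err
		}
		defer resp.Body.Close()
		body, _ := io.ReadAll(resp.Body)
		for _, line := range strings.Split(string(body), "\n") {
			if strings.HasPrefix(strings.TrimSpace(line), "[-]") {
				failing = append(failing, strings.TrimSpace(line))
			}
		}
		return resp.StatusCode == http.StatusOK, failing, nil
	}

	func main() {
		ok, failing, err := checkHealthz("https://192.168.49.2:8443/healthz")
		if err != nil {
			fmt.Println("healthz request failed:", err)
			return
		}
		fmt.Println("healthy:", ok)
		for _, f := range failing {
			fmt.Println("failing check:", f)
		}
	}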
	I0916 10:58:07.694404 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:07.702060 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:07.702095 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:08.194542 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:08.204723 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:08.204754 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:08.693989 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:08.701774 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:08.701808 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:09.194295 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:09.202193 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:09.202224 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:09.694937 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:09.702551 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:09.702580 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:10.194085 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:10.201957 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:10.201987 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:10.694329 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:10.701997 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:10.702024 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:11.194767 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:11.202615 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[... 32-line healthz output identical to the first 500 response at 10:58:07.201960, elided ...]
	W0916 10:58:11.202652 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[... identical 32-line healthz output elided ...]
	I0916 10:58:11.694005 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:11.701899 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:11.701934 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
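	For reference, the entries above are minikube's apiserver health wait: api_server.go polls the /healthz endpoint roughly every 500ms and logs the verbose check list whenever the server answers 500. Below is a minimal Go sketch of such a poll loop; it is not minikube's actual implementation — the URL is taken from the log, and the 500ms interval, 2-minute deadline, and TLS-skip setting are illustrative assumptions.

	// Minimal sketch of an apiserver /healthz wait loop, in the spirit of the
	// api_server.go polling seen above. Not minikube's actual code: the URL is
	// taken from the log; the interval and deadline are assumptions.
	package main

	import (
		"crypto/tls"
		"fmt"
		"io"
		"net/http"
		"time"
	)

	func main() {
		// The apiserver serves a self-signed cert here, so skip verification
		// for this diagnostic probe only.
		client := &http.Client{
			Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
			Timeout:   5 * time.Second,
		}
		deadline := time.Now().Add(2 * time.Minute) // assumed overall deadline
		for time.Now().Before(deadline) {
			resp, err := client.Get("https://192.168.49.2:8443/healthz?verbose")
			if err != nil {
				fmt.Println("healthz request failed:", err)
			} else {
				body, _ := io.ReadAll(resp.Body)
				resp.Body.Close()
				fmt.Printf("healthz returned %d:\n%s\n", resp.StatusCode, body)
				if resp.StatusCode == http.StatusOK {
					return // all checks passed
				}
			}
			time.Sleep(500 * time.Millisecond) // matches the ~500ms cadence in the log
		}
		fmt.Println("timed out waiting for apiserver /healthz to return 200")
	}

	When chasing the one failing check, kube-apiserver also exposes per-check paths (e.g. GET /healthz/poststarthook/start-service-ip-repair-controllers), which in recent versions typically report the underlying error that the aggregated output lists only as "reason withheld".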
	[... identical healthz poll output repeated at ~500ms intervals from 10:58:12.194 through 10:58:19.702 (paired I0916 api_server.go:253/279 and W0916 api_server.go:103 entries, 16 cycles elided), every response a 500 with the same single failing check: [-]poststarthook/start-service-ip-repair-controllers failed: reason withheld; the log resumes at the 10:58:20 poll below ...]
	I0916 10:58:20.194264 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:20.203527 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:20.203557 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:20.693983 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:20.701623 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:20.701650 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:21.194479 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:21.202891 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:21.202933 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:21.694445 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:21.703731 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:21.703760 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:22.194258 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:22.202096 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:22.202126 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:22.694702 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:22.702806 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:22.702881 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:23.194481 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:23.202585 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:58:23.202613 1434484 api_server.go:103] status: https://192.168.49.2:8443/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
	[+]poststarthook/rbac/bootstrap-roles ok
	[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:58:23.694268 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:23.876342 1434484 api_server.go:269] stopped: https://192.168.49.2:8443/healthz: Get "https://192.168.49.2:8443/healthz": read tcp 192.168.49.1:36538->192.168.49.2:8443: read: connection reset by peer
	I0916 10:58:24.194525 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:24.194972 1434484 api_server.go:269] stopped: https://192.168.49.2:8443/healthz: Get "https://192.168.49.2:8443/healthz": dial tcp 192.168.49.2:8443: connect: connection refused
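The ~500 ms cadence of the Checking/returned lines above is a plain HTTPS poll of the apiserver's /healthz endpoint, repeated until the 500s (here caused by the start-service-ip-repair-controllers hook) give way to 200, or until the connection drops as it does at 10:58:23.876. Below is a minimal, self-contained Go sketch of such a loop, not minikube's actual api_server.go code; the timeout, overall deadline, and TLS handling are assumptions (minikube verifies against the cluster CA rather than skipping verification).

// healthzpoll: illustrative sketch of the /healthz polling seen above.
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 4 * time.Second,
		// Assumption for this sketch: skip TLS verification instead of
		// loading the cluster CA, which the real code would use.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	url := "https://192.168.49.2:8443/healthz"
	deadline := time.Now().Add(2 * time.Minute)
	for time.Now().Before(deadline) {
		resp, err := client.Get(url)
		if err != nil {
			// Matches the "stopped: ... connection refused/reset" lines above.
			fmt.Println("stopped:", err)
		} else {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			fmt.Printf("%s returned %d:\n%s\n", url, resp.StatusCode, body)
			if resp.StatusCode == http.StatusOK {
				return // "ok" — apiserver is healthy
			}
		}
		time.Sleep(500 * time.Millisecond) // the log polls roughly every 500 ms
	}
}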
	I0916 10:58:24.694235 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:58:24.694337 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:58:24.773237 1434484 cri.go:89] found id: "9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468"
	I0916 10:58:24.773263 1434484 cri.go:89] found id: "3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde"
	I0916 10:58:24.773269 1434484 cri.go:89] found id: ""
	I0916 10:58:24.773276 1434484 logs.go:276] 2 containers: [9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468 3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde]
	I0916 10:58:24.773353 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:24.778334 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:24.782578 1434484 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:58:24.782651 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:58:24.853603 1434484 cri.go:89] found id: "8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890"
	I0916 10:58:24.853627 1434484 cri.go:89] found id: "29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec"
	I0916 10:58:24.853632 1434484 cri.go:89] found id: ""
	I0916 10:58:24.853639 1434484 logs.go:276] 2 containers: [8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890 29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec]
	I0916 10:58:24.853695 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:24.857941 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:24.862430 1434484 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:58:24.862505 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:58:24.915718 1434484 cri.go:89] found id: ""
	I0916 10:58:24.915746 1434484 logs.go:276] 0 containers: []
	W0916 10:58:24.915757 1434484 logs.go:278] No container was found matching "coredns"
	I0916 10:58:24.915763 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:58:24.915824 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:58:24.998644 1434484 cri.go:89] found id: "a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797"
	I0916 10:58:24.998670 1434484 cri.go:89] found id: "8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87"
	I0916 10:58:24.998676 1434484 cri.go:89] found id: ""
	I0916 10:58:24.998684 1434484 logs.go:276] 2 containers: [a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797 8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87]
	I0916 10:58:24.998742 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.004777 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.010184 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:58:25.010273 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:58:25.066074 1434484 cri.go:89] found id: "204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3"
	I0916 10:58:25.066114 1434484 cri.go:89] found id: ""
	I0916 10:58:25.066124 1434484 logs.go:276] 1 containers: [204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3]
	I0916 10:58:25.066182 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.070670 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:58:25.070761 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:58:25.126128 1434484 cri.go:89] found id: "be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583"
	I0916 10:58:25.126171 1434484 cri.go:89] found id: "2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459"
	I0916 10:58:25.126177 1434484 cri.go:89] found id: ""
	I0916 10:58:25.126185 1434484 logs.go:276] 2 containers: [be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583 2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459]
	I0916 10:58:25.126268 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.131085 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.135907 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:58:25.136021 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:58:25.203907 1434484 cri.go:89] found id: "142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f"
	I0916 10:58:25.203942 1434484 cri.go:89] found id: ""
	I0916 10:58:25.203951 1434484 logs.go:276] 1 containers: [142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f]
	I0916 10:58:25.204015 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:25.208211 1434484 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:58:25.208242 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:58:25.304207 1434484 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:58:25.304286 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:58:26.140803 1434484 logs.go:123] Gathering logs for kube-scheduler [a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797] ...
	I0916 10:58:26.140880 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797"
	I0916 10:58:26.216344 1434484 logs.go:123] Gathering logs for kube-scheduler [8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87] ...
	I0916 10:58:26.216423 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87"
	I0916 10:58:26.289472 1434484 logs.go:123] Gathering logs for kube-controller-manager [2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459] ...
	I0916 10:58:26.289506 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459"
	I0916 10:58:26.342791 1434484 logs.go:123] Gathering logs for etcd [29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec] ...
	I0916 10:58:26.342831 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec"
	I0916 10:58:26.424338 1434484 logs.go:123] Gathering logs for kindnet [142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f] ...
	I0916 10:58:26.424383 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f"
	I0916 10:58:26.484750 1434484 logs.go:123] Gathering logs for container status ...
	I0916 10:58:26.484786 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:58:26.556655 1434484 logs.go:123] Gathering logs for kubelet ...
	I0916 10:58:26.556877 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	I0916 10:58:26.651230 1434484 logs.go:123] Gathering logs for dmesg ...
	I0916 10:58:26.651279 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:58:26.671695 1434484 logs.go:123] Gathering logs for kube-apiserver [3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde] ...
	I0916 10:58:26.671722 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde"
	I0916 10:58:26.728930 1434484 logs.go:123] Gathering logs for kube-controller-manager [be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583] ...
	I0916 10:58:26.728961 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583"
	I0916 10:58:26.804446 1434484 logs.go:123] Gathering logs for kube-apiserver [9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468] ...
	I0916 10:58:26.804521 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468"
	I0916 10:58:26.875786 1434484 logs.go:123] Gathering logs for etcd [8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890] ...
	I0916 10:58:26.875827 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890"
	I0916 10:58:26.963299 1434484 logs.go:123] Gathering logs for kube-proxy [204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3] ...
	I0916 10:58:26.963345 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3"
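The log-gathering pass above follows a fixed two-step recipe per component: discover container IDs with `sudo crictl ps -a --quiet --name=<component>`, then tail each container with `sudo /usr/bin/crictl logs --tail 400 <id>`. A rough Go sketch of that recipe follows; the crictl invocations are taken verbatim from the log, but the helper names are illustrative, and minikube runs these over SSH inside the node (ssh_runner.go) rather than locally as assumed here.

// crilogs: illustrative sketch of the container discovery + log tailing above.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// containerIDs mirrors `sudo crictl ps -a --quiet --name=<name>`: one ID per line.
func containerIDs(name string) ([]string, error) {
	out, err := exec.Command("sudo", "crictl", "ps", "-a", "--quiet", "--name="+name).Output()
	if err != nil {
		return nil, err
	}
	return strings.Fields(string(out)), nil
}

func main() {
	for _, name := range []string{"kube-apiserver", "etcd", "coredns", "kube-scheduler", "kube-proxy", "kube-controller-manager", "kindnet"} {
		ids, err := containerIDs(name)
		if err != nil {
			fmt.Println("crictl ps failed:", err)
			continue
		}
		fmt.Printf("%d containers: %v\n", len(ids), ids)
		for _, id := range ids {
			// Mirrors `sudo /usr/bin/crictl logs --tail 400 <id>` from the log.
			logs, err := exec.Command("sudo", "crictl", "logs", "--tail", "400", id).CombinedOutput()
			if err != nil {
				fmt.Println("crictl logs failed:", err)
				continue
			}
			fmt.Printf("--- %s [%s] ---\n%s", name, id, logs)
		}
	}
}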
	I0916 10:58:29.519147 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:29.526962 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:58:29.527039 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:58:29.527049 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:29.527057 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:29.527061 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:29.540764 1434484 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:58:29.540979 1434484 api_server.go:141] control plane version: v1.31.1
	I0916 10:58:29.541037 1434484 api_server.go:131] duration metric: took 22.347183518s to wait for apiserver health ...
	I0916 10:58:29.541085 1434484 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:58:29.541140 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 10:58:29.541278 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 10:58:29.599084 1434484 cri.go:89] found id: "9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468"
	I0916 10:58:29.599112 1434484 cri.go:89] found id: "3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde"
	I0916 10:58:29.599118 1434484 cri.go:89] found id: ""
	I0916 10:58:29.599125 1434484 logs.go:276] 2 containers: [9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468 3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde]
	I0916 10:58:29.599239 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.603566 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.608100 1434484 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 10:58:29.608246 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 10:58:29.656901 1434484 cri.go:89] found id: "8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890"
	I0916 10:58:29.656925 1434484 cri.go:89] found id: "29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec"
	I0916 10:58:29.656930 1434484 cri.go:89] found id: ""
	I0916 10:58:29.656939 1434484 logs.go:276] 2 containers: [8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890 29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec]
	I0916 10:58:29.656998 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.660521 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.664309 1434484 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 10:58:29.664446 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 10:58:29.703073 1434484 cri.go:89] found id: ""
	I0916 10:58:29.703096 1434484 logs.go:276] 0 containers: []
	W0916 10:58:29.703106 1434484 logs.go:278] No container was found matching "coredns"
	I0916 10:58:29.703113 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 10:58:29.703178 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 10:58:29.742510 1434484 cri.go:89] found id: "a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797"
	I0916 10:58:29.742534 1434484 cri.go:89] found id: "8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87"
	I0916 10:58:29.742539 1434484 cri.go:89] found id: ""
	I0916 10:58:29.742547 1434484 logs.go:276] 2 containers: [a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797 8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87]
	I0916 10:58:29.742633 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.746518 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.750020 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 10:58:29.750117 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 10:58:29.788961 1434484 cri.go:89] found id: "204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3"
	I0916 10:58:29.789042 1434484 cri.go:89] found id: ""
	I0916 10:58:29.789057 1434484 logs.go:276] 1 containers: [204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3]
	I0916 10:58:29.789118 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.792747 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 10:58:29.792833 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 10:58:29.846311 1434484 cri.go:89] found id: "be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583"
	I0916 10:58:29.846345 1434484 cri.go:89] found id: "2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459"
	I0916 10:58:29.846351 1434484 cri.go:89] found id: ""
	I0916 10:58:29.846359 1434484 logs.go:276] 2 containers: [be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583 2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459]
	I0916 10:58:29.846457 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.851659 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.855303 1434484 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 10:58:29.855397 1434484 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 10:58:29.892481 1434484 cri.go:89] found id: "142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f"
	I0916 10:58:29.892507 1434484 cri.go:89] found id: ""
	I0916 10:58:29.892516 1434484 logs.go:276] 1 containers: [142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f]
	I0916 10:58:29.892610 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:29.896596 1434484 logs.go:123] Gathering logs for kube-apiserver [9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468] ...
	I0916 10:58:29.896669 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 9d3a49380a28adc77c80880a98d52d9806925eb299377f59f85f411c604cc468"
	I0916 10:58:29.948252 1434484 logs.go:123] Gathering logs for container status ...
	I0916 10:58:29.948281 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 10:58:29.998479 1434484 logs.go:123] Gathering logs for CRI-O ...
	I0916 10:58:29.998508 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 10:58:30.102102 1434484 logs.go:123] Gathering logs for kubelet ...
	I0916 10:58:30.102151 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	I0916 10:58:30.220379 1434484 logs.go:123] Gathering logs for etcd [29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec] ...
	I0916 10:58:30.220435 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 29fc8ed8b8f7f3d1e0dba0a22850e7a42676e477dec90e46467202f35c41cfec"
	I0916 10:58:30.289272 1434484 logs.go:123] Gathering logs for kube-controller-manager [be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583] ...
	I0916 10:58:30.289310 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 be821cdf7dddc9de52be661e1c6e03d86a704d4c523183dc899f64b43ac04583"
	I0916 10:58:30.361727 1434484 logs.go:123] Gathering logs for kindnet [142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f] ...
	I0916 10:58:30.361762 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 142f1b4238b0011ac301c130a985e97501d6bfc0d96d380741cff696085f2a2f"
	I0916 10:58:30.400705 1434484 logs.go:123] Gathering logs for dmesg ...
	I0916 10:58:30.400736 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0916 10:58:30.419766 1434484 logs.go:123] Gathering logs for describe nodes ...
	I0916 10:58:30.419796 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0916 10:58:30.751703 1434484 logs.go:123] Gathering logs for etcd [8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890] ...
	I0916 10:58:30.751738 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8834a41e97dc741d0e79671f975ecbff191e8d166d3d632d371a5f8ade9d0890"
	I0916 10:58:30.803642 1434484 logs.go:123] Gathering logs for kube-proxy [204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3] ...
	I0916 10:58:30.803676 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 204d3723c057d42ea2141887c1ebad0407134ed4baeffe51470031f1c20a0bc3"
	I0916 10:58:30.864497 1434484 logs.go:123] Gathering logs for kube-apiserver [3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde] ...
	I0916 10:58:30.864533 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 3365058081c0e585345a7d47e1ccba16f8453c4f51f446289361f3b8fbd79bde"
	I0916 10:58:30.906182 1434484 logs.go:123] Gathering logs for kube-scheduler [a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797] ...
	I0916 10:58:30.906213 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a98770698d81ba4df7de45f6f2171e1e370672221e5d1f1a6c0450a6fbd89797"
	I0916 10:58:30.960393 1434484 logs.go:123] Gathering logs for kube-scheduler [8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87] ...
	I0916 10:58:30.960429 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8dfc630c611cfbdf01df3bcfbe6c573e9b54e5f6fcf8df90dd905dc56b921d87"
	I0916 10:58:31.005706 1434484 logs.go:123] Gathering logs for kube-controller-manager [2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459] ...
	I0916 10:58:31.005806 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 2f98291c7b2a1482da8d22dd393673a913e72ea7d00f601cae0f2c3452f72459"
	I0916 10:58:33.549824 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:33.549847 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:33.549856 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:33.549863 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:33.557374 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:33.567401 1434484 system_pods.go:59] 26 kube-system pods found
	I0916 10:58:33.567452 1434484 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:58:33.567463 1434484 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:58:33.567474 1434484 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:58:33.567480 1434484 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:58:33.567486 1434484 system_pods.go:61] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:58:33.567490 1434484 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:58:33.567495 1434484 system_pods.go:61] "kindnet-plxdg" [15478b1f-0067-4d48-84f3-27b777cc4ff3] Running
	I0916 10:58:33.567506 1434484 system_pods.go:61] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:58:33.567511 1434484 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:58:33.567517 1434484 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 10:58:33.567526 1434484 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:58:33.567531 1434484 system_pods.go:61] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:58:33.567539 1434484 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:58:33.567546 1434484 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:58:33.567552 1434484 system_pods.go:61] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:58:33.567557 1434484 system_pods.go:61] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:58:33.567564 1434484 system_pods.go:61] "kube-proxy-br496" [db7b7049-0d21-4564-8c72-de55e63b5051] Running
	I0916 10:58:33.567569 1434484 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:58:33.567572 1434484 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:58:33.567578 1434484 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:58:33.567590 1434484 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:58:33.567594 1434484 system_pods.go:61] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:58:33.567598 1434484 system_pods.go:61] "kube-vip-ha-334765" [baed9adb-c604-4a84-b55e-53a93f120d7b] Running
	I0916 10:58:33.567605 1434484 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:58:33.567610 1434484 system_pods.go:61] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:58:33.567615 1434484 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:58:33.567622 1434484 system_pods.go:74] duration metric: took 4.026504254s to wait for pod list to return data ...
	I0916 10:58:33.567634 1434484 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:58:33.567721 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:58:33.567731 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:33.567740 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:33.567744 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:33.570887 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:33.571146 1434484 default_sa.go:45] found service account: "default"
	I0916 10:58:33.571165 1434484 default_sa.go:55] duration metric: took 3.524941ms for default service account to be created ...
	I0916 10:58:33.571181 1434484 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:58:33.571246 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:33.571256 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:33.571265 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:33.571269 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:33.576172 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:33.588917 1434484 system_pods.go:86] 26 kube-system pods found
	[the same 26 kube-system pods listed above, reported again via system_pods.go:89 with identical Running/readiness states]
	I0916 10:58:33.589203 1434484 system_pods.go:126] duration metric: took 18.015997ms to wait for k8s-apps to be running ...
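The pod wait above is the raw form of GET /api/v1/namespaces/kube-system/pods followed by a per-pod readiness scan. The same check written against client-go, as a sketch only: the kubeconfig path is an assumption, and minikube issues the request through its own round-tripper, as the round_trippers.go lines show.

// podwait: illustrative client-go version of the kube-system pod scan above.
package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location for this sketch.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pods, err := cs.CoreV1().Pods("kube-system").List(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d kube-system pods found\n", len(pods.Items))
	for _, p := range pods.Items {
		ready := false
		for _, c := range p.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
				ready = true
			}
		}
		// Corresponds to the Running / Ready:ContainersNotReady annotations above.
		fmt.Printf("%q phase=%s ready=%v\n", p.Name, p.Status.Phase, ready)
	}
}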
	I0916 10:58:33.589213 1434484 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:58:33.589283 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:58:33.605287 1434484 system_svc.go:56] duration metric: took 16.064193ms WaitForService to wait for kubelet
	I0916 10:58:33.605317 1434484 kubeadm.go:582] duration metric: took 1m9.224470162s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:58:33.605343 1434484 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:58:33.605422 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:58:33.605432 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:33.605441 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:33.605444 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:33.611453 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:33.613596 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:33.613634 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:33.613647 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:33.613652 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:33.613657 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:33.613661 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:33.613665 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:33.613670 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:33.613676 1434484 node_conditions.go:105] duration metric: took 8.327264ms to run NodePressure ...
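[editor's note] The NodePressure pass above lists every node and reads status.capacity from each. For reference, a minimal standalone version of that check; the client-certificate paths are the profile paths that appear elsewhere in this log, and CA verification is skipped for brevity — a sketch, not minikube's implementation.

package main

import (
	"crypto/tls"
	"encoding/json"
	"fmt"
	"net/http"
)

// nodeList mirrors just the fields of the /api/v1/nodes response we need.
type nodeList struct {
	Items []struct {
		Metadata struct {
			Name string `json:"name"`
		} `json:"metadata"`
		Status struct {
			Capacity map[string]string `json:"capacity"`
		} `json:"status"`
	} `json:"items"`
}

func main() {
	// Client-certificate auth, as the round_trippers requests above use.
	cert, err := tls.LoadX509KeyPair(
		"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt",
		"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key",
	)
	if err != nil {
		panic(err)
	}
	client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{
		Certificates:       []tls.Certificate{cert},
		InsecureSkipVerify: true, // sketch only; pin the cluster CA in real use
	}}}
	resp, err := client.Get("https://192.168.49.2:8443/api/v1/nodes")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var nl nodeList
	if err := json.NewDecoder(resp.Body).Decode(&nl); err != nil {
		panic(err)
	}
	// Print the same two capacities the log reports for each node.
	for _, n := range nl.Items {
		fmt.Println(n.Metadata.Name,
			"cpu:", n.Status.Capacity["cpu"],
			"ephemeral-storage:", n.Status.Capacity["ephemeral-storage"])
	}
}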
	I0916 10:58:33.613689 1434484 start.go:241] waiting for startup goroutines ...
	I0916 10:58:33.613717 1434484 start.go:255] writing updated cluster config ...
	I0916 10:58:33.619586 1434484 out.go:201] 
	I0916 10:58:33.623790 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:58:33.623944 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:58:33.626756 1434484 out.go:177] * Starting "ha-334765-m03" control-plane node in "ha-334765" cluster
	I0916 10:58:33.630671 1434484 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:58:33.632605 1434484 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:58:33.634069 1434484 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:58:33.634140 1434484 cache.go:56] Caching tarball of preloaded images
	I0916 10:58:33.634152 1434484 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:58:33.634291 1434484 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:58:33.634322 1434484 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:58:33.634495 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 10:58:33.654093 1434484 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:58:33.654115 1434484 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:58:33.654196 1434484 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:58:33.654221 1434484 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:58:33.654226 1434484 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:58:33.654234 1434484 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:58:33.654241 1434484 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:58:33.655459 1434484 image.go:273] response: 
	I0916 10:58:33.769980 1434484 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:58:33.770023 1434484 cache.go:194] Successfully downloaded all kic artifacts
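[editor's note] The sequence above shows the kic base-image fallback: the copy in the local Docker daemon has the wrong architecture (e.g. an amd64 image on this arm64 host), so the cached tarball is loaded instead. A sketch of that decision using the docker CLI; the image digest is omitted and the tarball path is illustrative, since minikube uses its own cache layout.

package main

import (
	"fmt"
	"os/exec"
	"runtime"
	"strings"
)

func main() {
	img := "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644" // digest omitted for brevity
	// Ask the daemon what architecture its copy of the image was built for.
	out, err := exec.Command("docker", "image", "inspect", "-f", "{{.Architecture}}", img).Output()
	if err == nil && strings.TrimSpace(string(out)) == runtime.GOARCH {
		fmt.Println("local image is usable as-is")
		return
	}
	// Wrong architecture or missing: load the cached tarball instead,
	// mirroring the cache.go fallback in this log.
	tarball := "/home/jenkins/.minikube/cache/kic/kicbase.tar" // assumed path
	if err := exec.Command("docker", "load", "-i", tarball).Run(); err != nil {
		panic(err)
	}
	fmt.Println("loaded base image from cached tarball")
}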
	I0916 10:58:33.770054 1434484 start.go:360] acquireMachinesLock for ha-334765-m03: {Name:mkfee903f3f5d2ff3d5e015b57c571ebdaa535f2 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:58:33.770131 1434484 start.go:364] duration metric: took 54.612µs to acquireMachinesLock for "ha-334765-m03"
	I0916 10:58:33.770158 1434484 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:58:33.770167 1434484 fix.go:54] fixHost starting: m03
	I0916 10:58:33.770448 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:58:33.788088 1434484 fix.go:112] recreateIfNeeded on ha-334765-m03: state=Stopped err=<nil>
	W0916 10:58:33.788116 1434484 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:58:33.791903 1434484 out.go:177] * Restarting existing docker container for "ha-334765-m03" ...
	I0916 10:58:33.795271 1434484 cli_runner.go:164] Run: docker start ha-334765-m03
	I0916 10:58:34.118224 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:58:34.152779 1434484 kic.go:430] container "ha-334765-m03" state is running.
	I0916 10:58:34.153156 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:58:34.179500 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:58:34.179748 1434484 machine.go:93] provisionDockerMachine start ...
	I0916 10:58:34.179805 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:34.204273 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:34.204513 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34653 <nil> <nil>}
	I0916 10:58:34.204523 1434484 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:58:34.209008 1434484 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
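[editor's note] The "handshake failed: EOF" here is the usual transient while sshd inside the just-restarted container comes up; libmachine simply redials until it gets through, as the next lines show. A sketch of that retry loop with golang.org/x/crypto/ssh, using the port and key path from this log — an illustration, not libmachine's code.

package main

import (
	"fmt"
	"os"
	"time"

	"golang.org/x/crypto/ssh"
)

// dialWithRetry keeps dialing until sshd accepts the handshake or the
// timeout elapses.
func dialWithRetry(addr string, cfg *ssh.ClientConfig, timeout time.Duration) (*ssh.Client, error) {
	deadline := time.Now().Add(timeout)
	for {
		client, err := ssh.Dial("tcp", addr, cfg)
		if err == nil {
			return client, nil
		}
		if time.Now().After(deadline) {
			return nil, fmt.Errorf("sshd never became ready: %w", err)
		}
		time.Sleep(time.Second) // sshd inside the container is still starting
	}
}

func main() {
	keyBytes, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa")
	if err != nil {
		panic(err)
	}
	signer, err := ssh.ParsePrivateKey(keyBytes)
	if err != nil {
		panic(err)
	}
	cfg := &ssh.ClientConfig{
		User:            "docker",
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // sketch only
		Timeout:         5 * time.Second,
	}
	client, err := dialWithRetry("127.0.0.1:34653", cfg, time.Minute)
	if err != nil {
		panic(err)
	}
	defer client.Close()
	fmt.Println("ssh ready")
}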
	I0916 10:58:37.410476 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
	
	I0916 10:58:37.410505 1434484 ubuntu.go:169] provisioning hostname "ha-334765-m03"
	I0916 10:58:37.410577 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:37.439796 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:37.440048 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34653 <nil> <nil>}
	I0916 10:58:37.440065 1434484 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m03 && echo "ha-334765-m03" | sudo tee /etc/hostname
	I0916 10:58:37.706905 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m03
	
	I0916 10:58:37.715787 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:37.741035 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:37.741272 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34653 <nil> <nil>}
	I0916 10:58:37.741289 1434484 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:58:37.943623 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
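[editor's note] The shell block above is an idempotent hosts-file update: if the hostname is already mapped it does nothing, if a 127.0.1.1 line exists it rewrites it, otherwise it appends one. A Go equivalent for reference — a sketch only; the real work here happens via the SSH command shown.

package main

import (
	"fmt"
	"os"
	"regexp"
	"strings"
)

// ensureHostname makes /etc/hosts map 127.0.1.1 to name, touching the file
// only when the mapping is missing.
func ensureHostname(path, name string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	if strings.Contains(string(data), name) { // crude presence check, like the grep above
		return nil
	}
	entry := "127.0.1.1 " + name
	re := regexp.MustCompile(`(?m)^127\.0\.1\.1\s.*$`)
	var out string
	if re.Match(data) {
		out = re.ReplaceAllString(string(data), entry) // rewrite the existing line
	} else {
		out = string(data) + entry + "\n" // or append a new one
	}
	return os.WriteFile(path, []byte(out), 0o644)
}

func main() {
	if err := ensureHostname("/etc/hosts", "ha-334765-m03"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}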
	I0916 10:58:37.943648 1434484 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:58:37.943665 1434484 ubuntu.go:177] setting up certificates
	I0916 10:58:37.943676 1434484 provision.go:84] configureAuth start
	I0916 10:58:37.943740 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:58:37.977619 1434484 provision.go:143] copyHostCerts
	I0916 10:58:37.977661 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:58:37.977694 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:58:37.977700 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:58:37.977799 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:58:37.977880 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:58:37.977897 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:58:37.977901 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:58:37.977927 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:58:37.977967 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:58:37.977983 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:58:37.977987 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:58:37.978009 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:58:37.978054 1434484 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m03 san=[127.0.0.1 192.168.49.4 ha-334765-m03 localhost minikube]
	I0916 10:58:38.774047 1434484 provision.go:177] copyRemoteCerts
	I0916 10:58:38.774164 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:58:38.774235 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:38.805907 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34653 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:58:38.954113 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:58:38.954237 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:58:38.982775 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:58:38.982870 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:58:39.017948 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:58:39.018330 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:58:39.071061 1434484 provision.go:87] duration metric: took 1.12737041s to configureAuth
	I0916 10:58:39.071152 1434484 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:58:39.071544 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:58:39.071744 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:39.109424 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:39.109676 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34653 <nil> <nil>}
	I0916 10:58:39.109692 1434484 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:58:40.802763 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:58:40.802880 1434484 machine.go:96] duration metric: took 6.623121861s to provisionDockerMachine
	I0916 10:58:40.802924 1434484 start.go:293] postStartSetup for "ha-334765-m03" (driver="docker")
	I0916 10:58:40.802993 1434484 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:58:40.803128 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:58:40.803248 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:40.844588 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34653 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:58:40.950892 1434484 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:58:40.954769 1434484 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:58:40.954809 1434484 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:58:40.954827 1434484 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:58:40.954841 1434484 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:58:40.954856 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:58:40.954913 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:58:40.954992 1434484 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:58:40.955005 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:58:40.955113 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:58:40.964726 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:58:41.006900 1434484 start.go:296] duration metric: took 203.926158ms for postStartSetup
	I0916 10:58:41.007049 1434484 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:58:41.007126 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:41.037985 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34653 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:58:41.210681 1434484 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:58:41.253871 1434484 fix.go:56] duration metric: took 7.483697243s for fixHost
	I0916 10:58:41.253899 1434484 start.go:83] releasing machines lock for "ha-334765-m03", held for 7.483754989s
	I0916 10:58:41.253971 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:58:41.296801 1434484 out.go:177] * Found network options:
	I0916 10:58:41.299583 1434484 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:58:41.302377 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:58:41.302415 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:58:41.302441 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:58:41.302452 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:58:41.302524 1434484 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:58:41.302581 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:41.302851 1434484 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:58:41.302910 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:58:41.339132 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34653 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:58:41.349642 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34653 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:58:41.866865 1434484 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:58:42.236222 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:58:42.278512 1434484 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:58:42.278606 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:58:42.314816 1434484 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:58:42.314848 1434484 start.go:495] detecting cgroup driver to use...
	I0916 10:58:42.314885 1434484 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:58:42.314943 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:58:42.366634 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:58:42.399776 1434484 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:58:42.399873 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:58:42.435562 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:58:42.474257 1434484 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:58:42.808414 1434484 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:58:43.074712 1434484 docker.go:233] disabling docker service ...
	I0916 10:58:43.074853 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:58:43.111263 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:58:43.162144 1434484 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:58:43.423083 1434484 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:58:43.685460 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:58:43.711626 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:58:43.784937 1434484 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:58:43.785061 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.832392 1434484 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:58:43.832471 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.865821 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.884874 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.913227 1434484 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:58:43.938030 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.965366 1434484 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:58:43.996928 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
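[editor's note] The sed/grep edits above converge on a cri-o drop-in roughly like the following. The section headers follow cri-o's stock 02-crio.conf layout; this is an assumed end state for illustration, since the edits are key-based and the actual file on the node may differ.

[crio.runtime]
cgroup_manager = "cgroupfs"
conmon_cgroup = "pod"
default_sysctls = [
  "net.ipv4.ip_unprivileged_port_start=0",
]

[crio.image]
pause_image = "registry.k8s.io/pause:3.10"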
	I0916 10:58:44.033714 1434484 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:58:44.055725 1434484 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:58:44.070820 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:44.295896 1434484 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:58:44.586124 1434484 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:58:44.586241 1434484 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 10:58:44.593329 1434484 start.go:563] Will wait 60s for crictl version
	I0916 10:58:44.593442 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:58:44.599993 1434484 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:58:44.659332 1434484 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:58:44.659448 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:58:44.705235 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:58:44.755086 1434484 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:58:44.757829 1434484 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:58:44.760724 1434484 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:58:44.763405 1434484 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:58:44.783198 1434484 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:58:44.787266 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:44.800238 1434484 mustload.go:65] Loading cluster: ha-334765
	I0916 10:58:44.800504 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:58:44.800801 1434484 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:58:44.818667 1434484 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:58:44.818954 1434484 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.4
	I0916 10:58:44.818968 1434484 certs.go:194] generating shared ca certs ...
	I0916 10:58:44.818984 1434484 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:44.819140 1434484 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:58:44.819201 1434484 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:58:44.819213 1434484 certs.go:256] generating profile certs ...
	I0916 10:58:44.819295 1434484 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 10:58:44.819379 1434484 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.ce2d4ce7
	I0916 10:58:44.819424 1434484 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 10:58:44.819439 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:58:44.819452 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:58:44.819469 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:58:44.819483 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:58:44.819494 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:58:44.819507 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:58:44.819523 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:58:44.819534 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:58:44.819593 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:58:44.819631 1434484 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:58:44.819643 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:58:44.819669 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:58:44.819696 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:58:44.819723 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:58:44.819771 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:58:44.819805 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:58:44.819821 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:58:44.819833 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:44.819893 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:58:44.842815 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34643 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:58:44.937024 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:58:44.941363 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:58:44.959585 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:58:44.963438 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 10:58:44.976048 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:58:44.980571 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:58:44.993962 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:58:44.997761 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 10:58:45.020929 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:58:45.039282 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:58:45.085336 1434484 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:58:45.114753 1434484 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 10:58:45.181707 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:58:45.265626 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:58:45.353133 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:58:45.393523 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:58:45.448105 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:58:45.487285 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:58:45.531209 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:58:45.574329 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:58:45.608859 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:58:45.638772 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:58:45.666449 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:58:45.694646 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:58:45.715488 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 10:58:45.737484 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:58:45.758170 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 10:58:45.778091 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:58:45.818259 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 10:58:45.850347 1434484 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:58:45.871634 1434484 ssh_runner.go:195] Run: openssl version
	I0916 10:58:45.877849 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:58:45.890906 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:58:45.895253 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:58:45.895325 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:58:45.903222 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:58:45.919453 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:58:45.929702 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:45.933818 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:45.933892 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:45.942755 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:58:45.956770 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:58:45.968443 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:58:45.972320 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:58:45.972436 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:58:45.985514 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:58:46.000415 1434484 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:58:46.006184 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:58:46.020854 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:58:46.029325 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:58:46.038601 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:58:46.047939 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:58:46.056207 1434484 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
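[editor's note] Each `openssl x509 -noout -checkend 86400` call above asks whether the certificate expires within the next 24 hours (86400 seconds); a non-zero exit would trigger regeneration. The same check in Go, using the first path stat'd above — a sketch of the technique, not minikube's code.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/var/lib/minikube/certs/apiserver-kubelet-client.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	// Equivalent of -checkend 86400: fail if expiry falls inside the window.
	if time.Until(cert.NotAfter) < 24*time.Hour {
		fmt.Println("certificate expires within 24h; would regenerate")
		os.Exit(1)
	}
	fmt.Println("certificate valid beyond the 86400s window")
}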
	I0916 10:58:46.063593 1434484 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 crio true true} ...
	I0916 10:58:46.063761 1434484 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:58:46.063808 1434484 kube-vip.go:115] generating kube-vip config ...
	I0916 10:58:46.063882 1434484 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:58:46.078680 1434484 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:58:46.078788 1434484 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
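[editor's note] Once this static-pod manifest lands in /etc/kubernetes/manifests on each control-plane node, kube-vip elects a leader that holds 192.168.49.254, and with lb_enable it balances port 8443 across the apiservers. A quick standalone probe of the VIP — hypothetical, not part of minikube; /readyz is reachable unauthenticated under default RBAC.

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			// The apiserver cert is validated elsewhere; skip verification in this sketch.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get("https://192.168.49.254:8443/readyz")
	if err != nil {
		panic(err) // VIP not held by any node yet
	}
	defer resp.Body.Close()
	fmt.Println("VIP answered with status", resp.Status)
}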
	I0916 10:58:46.078882 1434484 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:58:46.088368 1434484 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:58:46.088487 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:58:46.097672 1434484 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:58:46.116718 1434484 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:58:46.138497 1434484 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:58:46.159430 1434484 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:58:46.163010 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:46.174749 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:46.285014 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:58:46.298748 1434484 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 10:58:46.299311 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:58:46.302102 1434484 out.go:177] * Verifying Kubernetes components...
	I0916 10:58:46.305019 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:46.416515 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:58:46.429593 1434484 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:58:46.429867 1434484 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:58:46.429930 1434484 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
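[editor's note] The warning above is minikube steering around the VIP: the kubeconfig still names https://192.168.49.254:8443, but until the restarted node rejoins, requests go straight to a control plane known to be up. A client-go sketch of the same host override; the kubeconfig path is assumed.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	// Override the stale VIP host with a reachable control-plane node,
	// mirroring kubeadm.go:483 above.
	cfg.Host = "https://192.168.49.2:8443"
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	nodes, err := cs.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Println("reachable; cluster has", len(nodes.Items), "nodes")
}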
	I0916 10:58:46.430151 1434484 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m03" to be "Ready" ...
	I0916 10:58:46.430224 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:46.430237 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:46.430247 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:46.430253 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:46.433371 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:46.930425 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:46.930445 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:46.930455 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:46.930460 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:46.933704 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:47.431346 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:47.431370 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:47.431380 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:47.431385 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:47.434284 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:47.930766 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:47.930838 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:47.930861 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:47.930880 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:47.933749 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:48.431031 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:48.431060 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:48.431071 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:48.431075 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:48.434188 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:48.435055 1434484 node_ready.go:53] node "ha-334765-m03" has status "Ready":"Unknown"
	I0916 10:58:48.930745 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:48.930798 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:48.930808 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:48.930813 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:48.936003 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:49.431135 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:49.431190 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:49.431198 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:49.431201 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:49.434042 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:49.930552 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:49.930577 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:49.930588 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:49.930592 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:49.933471 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:50.430637 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:50.430661 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:50.430671 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:50.430675 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:50.434476 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:50.931357 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:50.931381 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:50.931392 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:50.931398 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:50.934445 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:50.935430 1434484 node_ready.go:53] node "ha-334765-m03" has status "Ready":"Unknown"
	I0916 10:58:51.430452 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:51.430476 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:51.430485 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:51.430491 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:51.433396 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:51.930507 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:51.930530 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:51.930540 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:51.930544 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:51.933636 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:52.431124 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:52.431153 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:52.431170 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:52.431174 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:52.434254 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:52.931291 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:52.931315 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:52.931325 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:52.931330 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:52.934231 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:53.431272 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:53.431295 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:53.431305 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.431311 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.434879 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:53.435711 1434484 node_ready.go:53] node "ha-334765-m03" has status "Ready":"Unknown"
	I0916 10:58:53.931247 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:53.931273 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:53.931284 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.931290 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.934181 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.431081 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:54.431103 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.431112 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.431117 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.433971 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.930379 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:54.930405 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.930414 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.930423 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.934339 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:54.935040 1434484 node_ready.go:49] node "ha-334765-m03" has status "Ready":"True"
	I0916 10:58:54.935061 1434484 node_ready.go:38] duration metric: took 8.504891517s for node "ha-334765-m03" to be "Ready" ...
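[editor's note] The 500ms polling loop above boils down to: GET the node object, find the "Ready" condition, stop once it reports "True" (it reads "Unknown" while the kubelet re-registers). A self-contained sketch of that loop; certificate paths are this profile's, and CA verification is skipped for brevity.

package main

import (
	"crypto/tls"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// nodeStatus mirrors only the condition fields of the node object.
type nodeStatus struct {
	Status struct {
		Conditions []struct {
			Type   string `json:"type"`
			Status string `json:"status"`
		} `json:"conditions"`
	} `json:"status"`
}

func ready(client *http.Client, url string) bool {
	resp, err := client.Get(url)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	var n nodeStatus
	if json.NewDecoder(resp.Body).Decode(&n) != nil {
		return false
	}
	for _, c := range n.Status.Conditions {
		if c.Type == "Ready" {
			return c.Status == "True" // "Unknown" while the node rejoins
		}
	}
	return false
}

func main() {
	cert, err := tls.LoadX509KeyPair(
		"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt",
		"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key",
	)
	if err != nil {
		panic(err)
	}
	client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{
		Certificates:       []tls.Certificate{cert},
		InsecureSkipVerify: true, // sketch only
	}}}
	url := "https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03"
	deadline := time.Now().Add(6 * time.Minute) // same budget as node_ready.go
	for time.Now().Before(deadline) {
		if ready(client, url) {
			fmt.Println("node is Ready")
			return
		}
		time.Sleep(500 * time.Millisecond) // same cadence as the loop above
	}
	fmt.Println("timed out waiting for Ready")
}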
	I0916 10:58:54.935071 1434484 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:58:54.935139 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:54.935168 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.935177 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.935180 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.942806 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:54.954490 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.954654 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:58:54.954668 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.954677 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.954697 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.957710 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.958326 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:58:54.958344 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.958353 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.958360 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.963638 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:54.964287 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.964308 1434484 pod_ready.go:82] duration metric: took 9.783794ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.964320 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.964385 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:58:54.964395 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.964403 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.964408 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.967123 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.967859 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:58:54.967877 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.967885 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.967889 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.970170 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.971054 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.971071 1434484 pod_ready.go:82] duration metric: took 6.7436ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.971082 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.971152 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:58:54.971161 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.971169 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.971175 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.973725 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.974716 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:58:54.974742 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.974752 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.974758 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.977286 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.978031 1434484 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.978066 1434484 pod_ready.go:82] duration metric: took 6.974708ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.978077 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.978140 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:58:54.978155 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.978162 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.978169 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.981036 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:54.982193 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:58:54.982213 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:54.982222 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.982231 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.985334 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:54.986389 1434484 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.986410 1434484 pod_ready.go:82] duration metric: took 8.32623ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.986422 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.130645 1434484 request.go:632] Waited for 144.159692ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:55.130712 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:55.130721 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:55.130735 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.130743 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.155327 1434484 round_trippers.go:574] Response Status: 200 OK in 24 milliseconds
	I0916 10:58:55.330580 1434484 request.go:632] Waited for 172.178308ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:55.330682 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:55.330702 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:55.330735 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.330758 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.354432 1434484 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
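
Editor's note: the "Waited for … due to client-side throttling, not priority and fairness" lines come from client-go's own request rate limiter (QPS 5, burst 10 by default), not from API-server priority and fairness, as the message itself says. The limits live on rest.Config; a hedged sketch with illustrative values, not something the test tunes:

cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
if err != nil {
	panic(err)
}
cfg.QPS = 50    // client-go default is 5 requests/second
cfg.Burst = 100 // client-go default burst is 10
cs := kubernetes.NewForConfigOrDie(cfg)
_ = cs // higher limits would shorten the throttling waits logged here
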
	I0916 10:58:55.530667 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:55.530732 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:55.530754 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.530774 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.533453 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:55.730575 1434484 request.go:632] Waited for 193.244131ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:55.730690 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:55.730723 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:55.730750 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.730770 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.733862 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:55.987415 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:55.987490 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:55.987516 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.987534 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.990992 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.130371 1434484 request.go:632] Waited for 138.093966ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:56.130527 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:56.130556 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:56.130578 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.130604 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.133911 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.487202 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:56.487271 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:56.487294 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.487315 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.511519 1434484 round_trippers.go:574] Response Status: 200 OK in 24 milliseconds
	I0916 10:58:56.530410 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:56.530485 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:56.530511 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.530530 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.536025 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:56.987244 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:56.987319 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:56.987342 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.987360 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.990248 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:56.991600 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:56.991660 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:56.991683 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.991698 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.994747 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:56.995740 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:57.487187 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:57.487259 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:57.487282 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.487301 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.491405 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:57.492597 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:57.492657 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:57.492700 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.492726 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.498217 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:57.986712 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:57.986739 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:57.986750 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.986755 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.994050 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:57.995114 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:57.995134 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:57.995153 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.995158 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.004954 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:58:58.486600 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:58.486625 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:58.486635 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.486640 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.489946 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.490732 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:58.490751 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:58.490761 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.490765 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.494657 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.986940 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:58.986965 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:58.986975 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.986981 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.989688 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:58.990725 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:58.990747 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:58.990758 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.990761 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.993347 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:58:59.487073 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:59.487157 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:59.487181 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.487210 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.490459 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:59.491279 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:59.491324 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:59.491347 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.491382 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.495188 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:59.495923 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:58:59.986610 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:58:59.986688 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:59.986713 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.986730 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.990271 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:59.991123 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:58:59.991190 1434484 round_trippers.go:469] Request Headers:
	I0916 10:58:59.991215 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.991233 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.995216 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:00.487634 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:00.487716 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:00.487740 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:00.487759 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:00.491199 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:00.492436 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:00.492511 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:00.492533 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:00.492550 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:00.496167 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:00.986801 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:00.986879 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:00.986909 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:00.986929 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:00.990056 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:00.991200 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:00.991266 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:00.991290 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:00.991307 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:00.994187 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:01.486634 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:01.486707 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:01.486729 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:01.486750 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:01.489615 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:01.490462 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:01.490519 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:01.490542 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:01.490563 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:01.493249 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:01.986672 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:01.986696 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:01.986705 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:01.986709 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:01.989711 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:01.990478 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:01.990497 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:01.990506 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:01.990512 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:01.999986 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:59:02.001157 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:02.487652 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:02.487675 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:02.487684 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:02.487689 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:02.490367 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:02.491252 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:02.491275 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:02.491285 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:02.491293 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:02.494039 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:02.986848 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:02.986870 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:02.986880 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:02.986884 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:02.989844 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:02.990837 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:02.990858 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:02.990869 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:02.990873 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:02.993589 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:03.486915 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:03.486939 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:03.486950 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:03.486960 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:03.495400 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:59:03.496346 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:03.496366 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:03.496376 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:03.496379 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:03.504412 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:59:03.986867 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:03.986895 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:03.986905 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:03.986910 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:03.990811 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:03.991991 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:03.992075 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:03.992122 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:03.992133 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:03.994941 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:04.487237 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:04.487259 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:04.487269 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:04.487274 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:04.490227 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:04.491018 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:04.491038 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:04.491047 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:04.491052 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:04.493629 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:04.494453 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:04.987007 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:04.987036 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:04.987046 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:04.987051 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:04.990245 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:04.991028 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:04.991060 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:04.991070 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:04.991076 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:04.994000 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:05.486653 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:05.486677 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:05.486687 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:05.486692 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:05.489552 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:05.490494 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:05.490514 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:05.490524 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:05.490528 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:05.493077 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:05.987368 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:05.987392 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:05.987402 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:05.987406 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:05.990605 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:05.991404 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:05.991423 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:05.991432 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:05.991437 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:05.993971 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:06.487343 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:06.487365 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:06.487374 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:06.487379 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:06.490729 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:06.491625 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:06.491646 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:06.491655 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:06.491658 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:06.494471 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:06.495088 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:06.986790 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:06.986815 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:06.986825 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:06.986831 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:06.989847 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:06.990631 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:06.990649 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:06.990660 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:06.990683 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:06.993336 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:07.487419 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:07.487442 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:07.487452 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.487457 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.490492 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:07.491262 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:07.491281 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:07.491290 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.491300 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.493902 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:07.987167 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:07.987187 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:07.987197 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.987201 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.990034 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:07.990872 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:07.990893 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:07.990902 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.990906 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.993265 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.486644 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:08.486667 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:08.486678 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.486684 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.489908 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:08.490891 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:08.490913 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:08.490923 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.490928 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.493875 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.987463 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:08.987484 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:08.987494 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.987499 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.990447 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.991253 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:08.991275 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:08.991285 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.991290 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.993816 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.994411 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:09.487189 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:09.487210 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:09.487220 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.487225 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.490114 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:09.490902 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:09.490925 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:09.490935 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.490940 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.493746 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:09.986873 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:09.986896 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:09.986906 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.986910 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.989810 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:09.990646 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:09.990667 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:09.990676 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.990682 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.993211 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:10.487387 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:10.487412 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:10.487422 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.487426 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.490294 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:10.491111 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:10.491132 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:10.491141 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.491153 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.493503 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:10.987551 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:10.987577 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:10.987587 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.987592 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.991255 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:10.992244 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:10.992264 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:10.992274 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.992279 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.994878 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:10.995496 1434484 pod_ready.go:103] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:11.486636 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:11.486667 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:11.486677 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.486681 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.491295 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:11.492451 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:11.492480 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:11.492489 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.492495 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.495742 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:11.986656 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:11.986679 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:11.986689 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.986693 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.989664 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:11.990581 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:11.990601 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:11.990612 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.990617 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.993192 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:12.487212 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:12.487253 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:12.487265 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.487270 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.491574 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:12.492370 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:12.492387 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:12.492397 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.492401 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.495102 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:12.986946 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:12.986968 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:12.986978 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.986982 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.989949 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:12.990916 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:12.990936 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:12.990946 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.990951 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.993607 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.486685 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:13.486710 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.486720 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.486724 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.490156 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.491312 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:13.491335 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.491344 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.491348 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.494209 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.495033 1434484 pod_ready.go:93] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.495058 1434484 pod_ready.go:82] duration metric: took 18.50862951s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
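
Editor's note: the 18.5s etcd-ha-334765-m03 wait above is the pod_ready.go pattern in full: re-fetch the pod on a ~500ms cadence (visible in the timestamps) until its Ready condition turns True, within the 6m0s budget. A sketch of the same loop using apimachinery's wait helpers (assumes a recent apimachinery; not minikube's actual code):

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

// waitPodReady polls the pod's Ready condition every 500ms for up to 6m,
// matching the cadence and budget of the pod_ready.go lines above.
func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
}
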
	I0916 10:59:13.495080 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.495145 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:59:13.495165 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.495173 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.495177 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.497943 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.498987 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:13.499009 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.499017 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.499020 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.505706 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:59:13.507264 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.507291 1434484 pod_ready.go:82] duration metric: took 12.203171ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.507303 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.507382 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:59:13.507392 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.507399 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.507411 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.510272 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.510991 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:13.511008 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.511018 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.511022 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.515516 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:13.516075 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.516094 1434484 pod_ready.go:82] duration metric: took 8.783138ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.516111 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.516221 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:59:13.516232 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.516250 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.516258 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.519310 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.520169 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:13.520188 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.520197 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.520203 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.529369 1434484 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:59:13.529998 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.530019 1434484 pod_ready.go:82] duration metric: took 13.896822ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.530030 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.530113 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:59:13.530126 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.530135 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.530139 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.533374 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.534203 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:13.534222 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.534232 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.534238 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.537217 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.537825 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.537847 1434484 pod_ready.go:82] duration metric: took 7.808976ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.537858 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.687213 1434484 request.go:632] Waited for 149.286127ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:59:13.687290 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:59:13.687306 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.687315 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.687322 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.690360 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.887493 1434484 request.go:632] Waited for 196.353871ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:13.887616 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:13.887640 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:13.887653 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.887661 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.890954 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.891860 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:13.891881 1434484 pod_ready.go:82] duration metric: took 354.01536ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:13.891893 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:14.086979 1434484 request.go:632] Waited for 194.961159ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:59:14.087237 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:59:14.087254 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:14.087268 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.087280 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.102220 1434484 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:59:14.287387 1434484 request.go:632] Waited for 184.3264ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:14.287443 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:14.287449 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:14.287458 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.287467 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.290161 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:14.290670 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:14.290690 1434484 pod_ready.go:82] duration metric: took 398.790484ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:14.290702 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:14.487187 1434484 request.go:632] Waited for 196.404947ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:59:14.487251 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:59:14.487260 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:14.487269 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.487278 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.490260 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:14.687268 1434484 request.go:632] Waited for 196.370396ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:14.687332 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:14.687338 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:14.687349 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.687355 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.690366 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:14.691062 1434484 pod_ready.go:93] pod "kube-proxy-4vsvh" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:14.691084 1434484 pod_ready.go:82] duration metric: took 400.374288ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:14.691096 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:14.887635 1434484 request.go:632] Waited for 196.46252ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:14.887698 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:14.887705 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:14.887711 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.887716 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.891717 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.087200 1434484 request.go:632] Waited for 194.404554ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:15.087268 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:15.087275 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:15.087284 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.087291 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.091618 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:15.095549 1434484 pod_ready.go:98] node "ha-334765-m04" hosting pod "kube-proxy-br496" in "kube-system" namespace is currently not "Ready" (skipping!): node "ha-334765-m04" has status "Ready":"Unknown"
	I0916 10:59:15.095583 1434484 pod_ready.go:82] duration metric: took 404.47702ms for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:15.095595 1434484 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m04" hosting pod "kube-proxy-br496" in "kube-system" namespace is currently not "Ready" (skipping!): node "ha-334765-m04" has status "Ready":"Unknown"
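
The WaitExtra message above is expected gating rather than a failure: pod_ready declines to wait on a pod whose host node reports Ready as "Unknown". A minimal client-go sketch of that node-condition check (the helper name isNodeReady and the kubeconfig path are illustrative, not minikube's actual code):

	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// isNodeReady reports whether a node's Ready condition is True. A pod
	// hosted on a node whose Ready status is Unknown (ha-334765-m04 above)
	// is skipped rather than waited on.
	func isNodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) (bool, error) {
		node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return false, err
		}
		for _, c := range node.Status.Conditions {
			if c.Type == corev1.NodeReady {
				return c.Status == corev1.ConditionTrue, nil
			}
		}
		return false, nil
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
		if err != nil {
			panic(err)
		}
		cs := kubernetes.NewForConfigOrDie(cfg)
		fmt.Println(isNodeReady(context.Background(), cs, "ha-334765-m04"))
	}
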
	I0916 10:59:15.095603 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:15.286993 1434484 request.go:632] Waited for 191.316714ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:59:15.287054 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:59:15.287067 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:15.287082 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.287089 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.290110 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.487152 1434484 request.go:632] Waited for 196.321864ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:15.487213 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:15.487223 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:15.487232 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.487239 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.490329 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.491035 1434484 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:15.491093 1434484 pod_ready.go:82] duration metric: took 395.481226ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:15.491121 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:15.686784 1434484 request.go:632] Waited for 195.535438ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:59:15.686848 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:59:15.686857 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:15.686867 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.686875 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.689951 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.887005 1434484 request.go:632] Waited for 196.334113ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:15.887064 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:15.887075 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:15.887089 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.887097 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.889960 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:15.890595 1434484 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:15.890618 1434484 pod_ready.go:82] duration metric: took 399.446148ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:15.890629 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:16.087199 1434484 request.go:632] Waited for 196.449532ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:59:16.087273 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:59:16.087279 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:16.087289 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.087295 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.090250 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:16.287139 1434484 request.go:632] Waited for 196.340094ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:16.287216 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:16.287225 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:16.287240 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.287261 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.290290 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:16.290821 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:16.290840 1434484 pod_ready.go:82] duration metric: took 400.203241ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:16.290851 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:16.487233 1434484 request.go:632] Waited for 196.316152ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:59:16.487301 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:59:16.487313 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:16.487322 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.487328 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.490204 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:16.687337 1434484 request.go:632] Waited for 196.280067ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:16.687451 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:16.687483 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:16.687510 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.687530 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.696221 1434484 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:59:16.697332 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:16.697354 1434484 pod_ready.go:82] duration metric: took 406.495865ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:16.697367 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:16.887752 1434484 request.go:632] Waited for 190.313974ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:59:16.887816 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:59:16.887828 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:16.887837 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.887846 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.890897 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:17.086872 1434484 request.go:632] Waited for 195.272799ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:17.086957 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:17.086967 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.086976 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.086982 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.089889 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:17.090670 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:17.090696 1434484 pod_ready.go:82] duration metric: took 393.321067ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:17.090710 1434484 pod_ready.go:39] duration metric: took 22.155628511s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
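
The 22s of "extra waiting" totaled above comes from polling each system pod's Ready condition in turn. A hedged sketch of the same pattern using client-go's wait helpers; the 6m0s timeout matches the log, while the 2s interval and the waitPodReady name are assumptions for illustration:

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// waitPodReady polls until the pod's Ready condition is True, the same
	// shape as the "waiting up to 6m0s for pod ... to be Ready" lines above.
	func waitPodReady(cs *kubernetes.Clientset, ns, name string) error {
		return wait.PollUntilContextTimeout(context.Background(), 2*time.Second, 6*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil // treat API blips as "not yet"; keep polling
				}
				for _, c := range pod.Status.Conditions {
					if c.Type == corev1.PodReady {
						return c.Status == corev1.ConditionTrue, nil
					}
				}
				return false, nil
			})
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
		if err != nil {
			panic(err)
		}
		cs := kubernetes.NewForConfigOrDie(cfg)
		fmt.Println(waitPodReady(cs, "kube-system", "kube-scheduler-ha-334765-m03"))
	}
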
	I0916 10:59:17.090730 1434484 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:59:17.090800 1434484 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:59:17.111095 1434484 api_server.go:72] duration metric: took 30.812222151s to wait for apiserver process to appear ...
	I0916 10:59:17.111142 1434484 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:59:17.111184 1434484 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:59:17.118893 1434484 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:59:17.118979 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:59:17.118992 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.119002 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.119010 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.119847 1434484 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:59:17.119912 1434484 api_server.go:141] control plane version: v1.31.1
	I0916 10:59:17.119925 1434484 api_server.go:131] duration metric: took 8.776492ms to wait for apiserver health ...
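
The healthz probe above is just an HTTPS GET that must return 200 with body "ok", followed by a /version read. A standalone equivalent; TLS verification is skipped here purely for brevity, whereas the real check trusts the cluster CA from the minikube profile:

	package main

	import (
		"crypto/tls"
		"fmt"
		"io"
		"net/http"
		"time"
	)

	func main() {
		// InsecureSkipVerify for illustration only; verify against the
		// cluster CA in anything real.
		client := &http.Client{
			Timeout:   5 * time.Second,
			Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
		}
		resp, err := client.Get("https://192.168.49.2:8443/healthz")
		if err != nil {
			panic(err)
		}
		defer resp.Body.Close()
		body, _ := io.ReadAll(resp.Body)
		fmt.Printf("%d %s\n", resp.StatusCode, body) // expect: 200 ok
	}
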
	I0916 10:59:17.119933 1434484 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:59:17.287294 1434484 request.go:632] Waited for 167.290908ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:17.287368 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:17.287380 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.287390 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.287402 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.294296 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:59:17.304468 1434484 system_pods.go:59] 26 kube-system pods found
	I0916 10:59:17.304505 1434484 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:59:17.304513 1434484 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:59:17.304517 1434484 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:59:17.304522 1434484 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:59:17.304710 1434484 system_pods.go:61] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:59:17.304716 1434484 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:59:17.304727 1434484 system_pods.go:61] "kindnet-plxdg" [15478b1f-0067-4d48-84f3-27b777cc4ff3] Running
	I0916 10:59:17.304731 1434484 system_pods.go:61] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:59:17.304737 1434484 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:59:17.304769 1434484 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:59:17.304783 1434484 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:59:17.304788 1434484 system_pods.go:61] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:59:17.304797 1434484 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:59:17.304801 1434484 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:59:17.304808 1434484 system_pods.go:61] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:59:17.304812 1434484 system_pods.go:61] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:59:17.304824 1434484 system_pods.go:61] "kube-proxy-br496" [db7b7049-0d21-4564-8c72-de55e63b5051] Running
	I0916 10:59:17.304828 1434484 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:59:17.304832 1434484 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:59:17.304848 1434484 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:59:17.304859 1434484 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:59:17.304863 1434484 system_pods.go:61] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:59:17.304867 1434484 system_pods.go:61] "kube-vip-ha-334765" [baed9adb-c604-4a84-b55e-53a93f120d7b] Running
	I0916 10:59:17.304871 1434484 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:59:17.304878 1434484 system_pods.go:61] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:59:17.304883 1434484 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:59:17.304896 1434484 system_pods.go:74] duration metric: took 184.953379ms to wait for pod list to return data ...
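
The recurring "Waited for ... due to client-side throttling, not priority and fairness" lines are produced by client-go's local rate limiter, not by the API server. With the client-go defaults of QPS 5 and Burst 10, back-to-back GETs queue in roughly 200ms steps, which matches the spacing visible above. A sketch of where those knobs live (kubeconfig path illustrative):

	package main

	import (
		"fmt"

		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
		if err != nil {
			panic(err)
		}
		// client-go's default limiter (QPS 5, Burst 10) produces the ~200ms
		// waits logged above; raising these trades API-server load for speed.
		cfg.QPS = 5
		cfg.Burst = 10
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		fmt.Println(cs != nil)
	}
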
	I0916 10:59:17.304904 1434484 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:59:17.487308 1434484 request.go:632] Waited for 182.316104ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:59:17.487367 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:59:17.487373 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.487382 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.487386 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.490777 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:17.491087 1434484 default_sa.go:45] found service account: "default"
	I0916 10:59:17.491107 1434484 default_sa.go:55] duration metric: took 186.194873ms for default service account to be created ...
	I0916 10:59:17.491118 1434484 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:59:17.687415 1434484 request.go:632] Waited for 196.223987ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:17.687474 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:17.687481 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.687490 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.687498 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.693477 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:17.705130 1434484 system_pods.go:86] 26 kube-system pods found
	I0916 10:59:17.705181 1434484 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running
	I0916 10:59:17.705194 1434484 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running
	I0916 10:59:17.705223 1434484 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 10:59:17.705240 1434484 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 10:59:17.705252 1434484 system_pods.go:89] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 10:59:17.705261 1434484 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 10:59:17.705266 1434484 system_pods.go:89] "kindnet-plxdg" [15478b1f-0067-4d48-84f3-27b777cc4ff3] Running
	I0916 10:59:17.705270 1434484 system_pods.go:89] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 10:59:17.705280 1434484 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 10:59:17.705286 1434484 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 10:59:17.705296 1434484 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 10:59:17.705309 1434484 system_pods.go:89] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 10:59:17.705318 1434484 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 10:59:17.705327 1434484 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 10:59:17.705331 1434484 system_pods.go:89] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 10:59:17.705336 1434484 system_pods.go:89] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 10:59:17.705342 1434484 system_pods.go:89] "kube-proxy-br496" [db7b7049-0d21-4564-8c72-de55e63b5051] Running
	I0916 10:59:17.705347 1434484 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 10:59:17.705359 1434484 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 10:59:17.705363 1434484 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 10:59:17.705372 1434484 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 10:59:17.705385 1434484 system_pods.go:89] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 10:59:17.705389 1434484 system_pods.go:89] "kube-vip-ha-334765" [baed9adb-c604-4a84-b55e-53a93f120d7b] Running
	I0916 10:59:17.705396 1434484 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 10:59:17.705405 1434484 system_pods.go:89] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 10:59:17.705409 1434484 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 10:59:17.705417 1434484 system_pods.go:126] duration metric: took 214.293553ms to wait for k8s-apps to be running ...
	I0916 10:59:17.705429 1434484 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:59:17.705497 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:59:17.719489 1434484 system_svc.go:56] duration metric: took 14.049147ms WaitForService to wait for kubelet
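
The kubelet liveness probe above relies on systemctl's exit status rather than its output. A minimal wrapper around the same idea (exit status 0 means active; the extra "service" token in the logged invocation is a minikube quirk, plain unit names also work):

	package main

	import (
		"fmt"
		"os/exec"
	)

	// isActive mirrors the `systemctl is-active --quiet` probe above.
	func isActive(unit string) bool {
		return exec.Command("systemctl", "is-active", "--quiet", unit).Run() == nil
	}

	func main() {
		fmt.Println(isActive("kubelet"))
	}
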
	I0916 10:59:17.719519 1434484 kubeadm.go:582] duration metric: took 31.420651618s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:59:17.719538 1434484 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:59:17.887104 1434484 request.go:632] Waited for 167.465517ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:17.887194 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:17.887205 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:17.887215 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.887221 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.890575 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:17.892464 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:17.892495 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:17.892507 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:17.892513 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:17.892518 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:17.892522 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:17.892526 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:17.892530 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:17.892536 1434484 node_conditions.go:105] duration metric: took 172.981972ms to run NodePressure ...
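
The NodePressure pass above reads each node's capacity (203034800Ki of ephemeral storage and 2 CPUs per node in this run). The same fields can be read via client-go with the standard corev1 resource-name constants (kubeconfig path illustrative):

	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
		if err != nil {
			panic(err)
		}
		cs := kubernetes.NewForConfigOrDie(cfg)
		nodes, err := cs.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
			cpu := n.Status.Capacity[corev1.ResourceCPU]
			// e.g. "ha-334765: storage=203034800Ki cpu=2", matching the log
			fmt.Printf("%s: storage=%s cpu=%s\n", n.Name, storage.String(), cpu.String())
		}
	}
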
	I0916 10:59:17.892547 1434484 start.go:241] waiting for startup goroutines ...
	I0916 10:59:17.892569 1434484 start.go:255] writing updated cluster config ...
	I0916 10:59:17.895963 1434484 out.go:201] 
	I0916 10:59:17.898739 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:59:17.898867 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:59:17.902690 1434484 out.go:177] * Starting "ha-334765-m04" worker node in "ha-334765" cluster
	I0916 10:59:17.905553 1434484 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:59:17.908366 1434484 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:59:17.910741 1434484 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:59:17.910774 1434484 cache.go:56] Caching tarball of preloaded images
	I0916 10:59:17.910887 1434484 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 10:59:17.910904 1434484 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:59:17.911038 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:59:17.911290 1434484 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	W0916 10:59:17.944998 1434484 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:59:17.945021 1434484 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:59:17.945098 1434484 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:59:17.945122 1434484 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:59:17.945128 1434484 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:59:17.945136 1434484 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:59:17.945146 1434484 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:59:17.946703 1434484 image.go:273] response: 
	I0916 10:59:18.070671 1434484 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:59:18.070709 1434484 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:59:18.070740 1434484 start.go:360] acquireMachinesLock for ha-334765-m04: {Name:mkadeccdfd1355580c8feb9cbbbc4cc86963393f Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:59:18.070805 1434484 start.go:364] duration metric: took 43.174µs to acquireMachinesLock for "ha-334765-m04"
	I0916 10:59:18.070834 1434484 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:59:18.070844 1434484 fix.go:54] fixHost starting: m04
	I0916 10:59:18.071131 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 10:59:18.089984 1434484 fix.go:112] recreateIfNeeded on ha-334765-m04: state=Stopped err=<nil>
	W0916 10:59:18.090012 1434484 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:59:18.093200 1434484 out.go:177] * Restarting existing docker container for "ha-334765-m04" ...
	I0916 10:59:18.095838 1434484 cli_runner.go:164] Run: docker start ha-334765-m04
	I0916 10:59:18.433789 1434484 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 10:59:18.457632 1434484 kic.go:430] container "ha-334765-m04" state is running.
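
The state checks around the restart are plain `docker container inspect` calls with a Go template. A small os/exec sketch of the same probe (the helper name is illustrative):

	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	// containerState shells out to docker the same way the cli_runner
	// lines above do; "running" is what unblocks provisioning.
	func containerState(name string) (string, error) {
		out, err := exec.Command("docker", "container", "inspect",
			name, "--format", "{{.State.Status}}").Output()
		if err != nil {
			return "", err
		}
		return strings.TrimSpace(string(out)), nil
	}

	func main() {
		state, err := containerState("ha-334765-m04")
		fmt.Println(state, err) // e.g. "running <nil>"
	}
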
	I0916 10:59:18.458005 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 10:59:18.479674 1434484 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 10:59:18.479927 1434484 machine.go:93] provisionDockerMachine start ...
	I0916 10:59:18.479990 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:18.507095 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:18.507352 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34658 <nil> <nil>}
	I0916 10:59:18.507363 1434484 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:59:18.508321 1434484 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:59:21.653301 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m04
	
	I0916 10:59:21.653331 1434484 ubuntu.go:169] provisioning hostname "ha-334765-m04"
	I0916 10:59:21.653421 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:21.677874 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:21.678173 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34658 <nil> <nil>}
	I0916 10:59:21.678192 1434484 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m04 && echo "ha-334765-m04" | sudo tee /etc/hostname
	I0916 10:59:21.850150 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m04
	
	I0916 10:59:21.850247 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:21.886568 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:21.886898 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34658 <nil> <nil>}
	I0916 10:59:21.886928 1434484 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m04' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m04/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m04' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:59:22.033141 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
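
Provisioning reaches the node over SSH at 127.0.0.1:34658, the host port Docker mapped to the container's 22/tcp, authenticating with the machine's id_rsa (the key path appears in the sshutil lines further down). A hedged sketch of running one such command with golang.org/x/crypto/ssh:

	package main

	import (
		"fmt"
		"os"

		"golang.org/x/crypto/ssh"
	)

	func main() {
		key, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa")
		if err != nil {
			panic(err)
		}
		signer, err := ssh.ParsePrivateKey(key)
		if err != nil {
			panic(err)
		}
		cfg := &ssh.ClientConfig{
			User:            "docker",
			Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
			HostKeyCallback: ssh.InsecureIgnoreHostKey(), // throwaway test container; never in production
		}
		// 34658 is the host port mapped to the container's 22/tcp.
		client, err := ssh.Dial("tcp", "127.0.0.1:34658", cfg)
		if err != nil {
			panic(err)
		}
		defer client.Close()
		sess, err := client.NewSession()
		if err != nil {
			panic(err)
		}
		defer sess.Close()
		out, err := sess.CombinedOutput("hostname")
		fmt.Printf("%s %v\n", out, err) // expect: ha-334765-m04
	}
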
	I0916 10:59:22.033170 1434484 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 10:59:22.033188 1434484 ubuntu.go:177] setting up certificates
	I0916 10:59:22.033198 1434484 provision.go:84] configureAuth start
	I0916 10:59:22.033260 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 10:59:22.054621 1434484 provision.go:143] copyHostCerts
	I0916 10:59:22.054671 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:59:22.054709 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 10:59:22.054722 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 10:59:22.054804 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 10:59:22.054903 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:59:22.054925 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 10:59:22.054933 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 10:59:22.054964 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 10:59:22.055013 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:59:22.055075 1434484 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 10:59:22.055084 1434484 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 10:59:22.055177 1434484 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 10:59:22.055363 1434484 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m04 san=[127.0.0.1 192.168.49.5 ha-334765-m04 localhost minikube]
	I0916 10:59:22.786019 1434484 provision.go:177] copyRemoteCerts
	I0916 10:59:22.786103 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:59:22.786181 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:22.804474 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34658 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:59:22.907340 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:59:22.907459 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 10:59:22.940653 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:59:22.940745 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:59:22.967935 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:59:22.968027 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:59:22.996079 1434484 provision.go:87] duration metric: took 962.865676ms to configureAuth
	I0916 10:59:22.996105 1434484 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:59:22.996346 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:59:22.996448 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:23.015821 1434484 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:23.016064 1434484 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34658 <nil> <nil>}
	I0916 10:59:23.016084 1434484 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 10:59:23.317430 1434484 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 10:59:23.317455 1434484 machine.go:96] duration metric: took 4.837517471s to provisionDockerMachine
	I0916 10:59:23.317471 1434484 start.go:293] postStartSetup for "ha-334765-m04" (driver="docker")
	I0916 10:59:23.317482 1434484 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:59:23.317549 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:59:23.317600 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:23.340127 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34658 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:59:23.446696 1434484 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:59:23.450295 1434484 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:59:23.450335 1434484 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:59:23.450347 1434484 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:59:23.450354 1434484 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:59:23.450364 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 10:59:23.450429 1434484 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 10:59:23.450521 1434484 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 10:59:23.450533 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 10:59:23.450635 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:59:23.461595 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:59:23.494764 1434484 start.go:296] duration metric: took 177.277577ms for postStartSetup
	I0916 10:59:23.494851 1434484 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:59:23.494901 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:23.512154 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34658 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:59:23.609484 1434484 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:59:23.614015 1434484 fix.go:56] duration metric: took 5.543161347s for fixHost
	I0916 10:59:23.614038 1434484 start.go:83] releasing machines lock for "ha-334765-m04", held for 5.54321948s
	I0916 10:59:23.614111 1434484 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 10:59:23.635202 1434484 out.go:177] * Found network options:
	I0916 10:59:23.637897 1434484 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3,192.168.49.4
	W0916 10:59:23.640617 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:23.640652 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:23.640663 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:23.640711 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:23.640722 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:23.640732 1434484 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:59:23.640811 1434484 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 10:59:23.640861 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:23.642095 1434484 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:59:23.642171 1434484 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:59:23.674398 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34658 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:59:23.689397 1434484 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34658 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:59:23.938426 1434484 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:59:23.949948 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:59:23.962980 1434484 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:59:23.963067 1434484 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:59:23.972927 1434484 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:59:23.972962 1434484 start.go:495] detecting cgroup driver to use...
	I0916 10:59:23.973010 1434484 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:59:23.973083 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 10:59:23.986672 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 10:59:24.004348 1434484 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:59:24.004430 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:59:24.022384 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:59:24.037672 1434484 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:59:24.142284 1434484 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:59:24.246868 1434484 docker.go:233] disabling docker service ...
	I0916 10:59:24.246945 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:59:24.260892 1434484 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:59:24.273274 1434484 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:59:24.368905 1434484 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:59:24.467274 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:59:24.479140 1434484 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:59:24.500939 1434484 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 10:59:24.501012 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.513744 1434484 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 10:59:24.513822 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.525746 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.537175 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.549062 1434484 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:59:24.559250 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.569935 1434484 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.580882 1434484 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 10:59:24.599575 1434484 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:59:24.608321 1434484 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:59:24.617205 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:24.750266 1434484 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 10:59:24.907195 1434484 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 10:59:24.907276 1434484 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
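
The 60s socket wait above is a stat poll on /var/run/crio/crio.sock until CRI-O finishes restarting. A minimal version of that loop (the 500ms interval is an assumption):

	package main

	import (
		"fmt"
		"os"
		"time"
	)

	// waitForSocket polls until path exists or the timeout elapses, like
	// the "Will wait 60s for socket path" step above.
	func waitForSocket(path string, timeout time.Duration) error {
		deadline := time.Now().Add(timeout)
		for time.Now().Before(deadline) {
			if _, err := os.Stat(path); err == nil {
				return nil
			}
			time.Sleep(500 * time.Millisecond)
		}
		return fmt.Errorf("timed out waiting for %s", path)
	}

	func main() {
		fmt.Println(waitForSocket("/var/run/crio/crio.sock", 60*time.Second))
	}
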
	I0916 10:59:24.911651 1434484 start.go:563] Will wait 60s for crictl version
	I0916 10:59:24.911718 1434484 ssh_runner.go:195] Run: which crictl
	I0916 10:59:24.915963 1434484 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:59:24.968868 1434484 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 10:59:24.968965 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:59:25.016880 1434484 ssh_runner.go:195] Run: crio --version
	I0916 10:59:25.067250 1434484 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 10:59:25.069890 1434484 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:59:25.072485 1434484 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:59:25.075568 1434484 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3,192.168.49.4
	I0916 10:59:25.078187 1434484 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:59:25.094560 1434484 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:59:25.098636 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
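
The bash pipeline above strips any stale host.minikube.internal line from /etc/hosts and appends the current gateway mapping. The same upsert expressed in Go (writing the real /etc/hosts needs root, so the demo uses a scratch file; upsertHost is an illustrative name):

	package main

	import (
		"fmt"
		"os"
		"strings"
	)

	// upsertHost drops any existing line ending in "\t<name>" and appends a
	// fresh "<ip>\t<name>" entry, mirroring the grep/echo pipeline above.
	func upsertHost(path, ip, name string) error {
		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		var kept []string
		for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
			if !strings.HasSuffix(line, "\t"+name) {
				kept = append(kept, line)
			}
		}
		kept = append(kept, ip+"\t"+name)
		return os.WriteFile(path, []byte(strings.Join(kept, "\n")+"\n"), 0644)
	}

	func main() {
		_ = os.WriteFile("/tmp/hosts.test", []byte("127.0.0.1\tlocalhost\n"), 0644)
		fmt.Println(upsertHost("/tmp/hosts.test", "192.168.49.1", "host.minikube.internal"))
	}
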
	I0916 10:59:25.110516 1434484 mustload.go:65] Loading cluster: ha-334765
	I0916 10:59:25.110783 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:59:25.111057 1434484 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:59:25.131459 1434484 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:59:25.131789 1434484 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.5
	I0916 10:59:25.131813 1434484 certs.go:194] generating shared ca certs ...
	I0916 10:59:25.131829 1434484 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:59:25.131954 1434484 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 10:59:25.132004 1434484 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 10:59:25.132020 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:59:25.132037 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:59:25.132049 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:59:25.132066 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:59:25.132121 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 10:59:25.132153 1434484 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 10:59:25.132164 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:59:25.132187 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 10:59:25.132209 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:59:25.132231 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 10:59:25.132277 1434484 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 10:59:25.132309 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 10:59:25.132326 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 10:59:25.132337 1434484 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:25.132358 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:59:25.161803 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 10:59:25.194296 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:59:25.231876 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 10:59:25.262149 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 10:59:25.294907 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 10:59:25.321697 1434484 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:59:25.362325 1434484 ssh_runner.go:195] Run: openssl version
	I0916 10:59:25.368150 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 10:59:25.389148 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 10:59:25.395782 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 10:59:25.395864 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 10:59:25.404560 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 10:59:25.416325 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 10:59:25.428347 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 10:59:25.432108 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 10:59:25.432177 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 10:59:25.440018 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:59:25.449219 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:59:25.459935 1434484 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:25.465330 1434484 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:25.465465 1434484 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:25.472976 1434484 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
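
Each CA above is symlinked into /etc/ssl/certs under the name printed by `openssl x509 -hash` plus a ".0" suffix. Reproducing OpenSSL's subject hash in Go is fiddly, so this sketch only parses the PEM and surfaces the subject the hash is derived from:

	package main

	import (
		"crypto/x509"
		"encoding/pem"
		"fmt"
		"os"
	)

	func main() {
		// Path from the log; any PEM certificate works here.
		data, err := os.ReadFile("/usr/share/ca-certificates/minikubeCA.pem")
		if err != nil {
			panic(err)
		}
		block, _ := pem.Decode(data)
		if block == nil {
			panic("no PEM block found")
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			panic(err)
		}
		// openssl x509 -hash derives the /etc/ssl/certs/<hash>.0 link name
		// from this subject; here we just print the fields.
		fmt.Println(cert.Subject, cert.NotAfter)
	}
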
	I0916 10:59:25.483981 1434484 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:59:25.487649 1434484 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:59:25.487691 1434484 kubeadm.go:934] updating node {m04 192.168.49.5 0 v1.31.1  false true} ...
	I0916 10:59:25.487771 1434484 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m04 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.5
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:59:25.487841 1434484 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:59:25.496989 1434484 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:59:25.497067 1434484 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 10:59:25.508226 1434484 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 10:59:25.530882 1434484 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:59:25.551896 1434484 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:59:25.557264 1434484 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:59:25.568370 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:25.713503 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:59:25.731736 1434484 start.go:235] Will wait 6m0s for node &{Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 10:59:25.732172 1434484 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:59:25.734773 1434484 out.go:177] * Verifying Kubernetes components...
	I0916 10:59:25.737331 1434484 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:25.846176 1434484 ssh_runner.go:195] Run: sudo systemctl start kubelet
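
Note: the /etc/hosts rewrite at 10:59:25.557 uses a filter-then-append pipeline so the control-plane.minikube.internal entry stays correct across repeated runs. A minimal Go sketch of the same idempotent pattern, with a hypothetical helper name (ensureHostsEntry), not minikube's actual code:

package hosts

import (
    "os"
    "strings"
)

// ensureHostsEntry drops any existing line ending in "\t<name>" and appends
// the current mapping, so repeated invocations converge on one entry --
// the same effect as the grep -v / echo pipeline in the log.
func ensureHostsEntry(ip, name string) error {
    data, err := os.ReadFile("/etc/hosts")
    if err != nil {
        return err
    }
    var kept []string
    for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
        if !strings.HasSuffix(line, "\t"+name) {
            kept = append(kept, line)
        }
    }
    kept = append(kept, ip+"\t"+name)
    return os.WriteFile("/etc/hosts", []byte(strings.Join(kept, "\n")+"\n"), 0644)
}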
	I0916 10:59:25.860361 1434484 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:59:25.860636 1434484 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:59:25.860794 1434484 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:59:25.861006 1434484 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m04" to be "Ready" ...
	I0916 10:59:25.861078 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:25.861090 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:25.861099 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:25.861103 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:25.863811 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:26.361569 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:26.361593 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:26.361603 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.361607 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.364418 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:26.861257 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:26.861280 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:26.861290 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.861296 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.864532 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:27.362064 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:27.362089 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:27.362101 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.362106 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.365200 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:27.862064 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:27.862087 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:27.862097 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.862102 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.865176 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:27.865858 1434484 node_ready.go:53] node "ha-334765-m04" has status "Ready":"Unknown"
	I0916 10:59:28.361521 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:28.361545 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:28.361562 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.361567 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.364622 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:28.861866 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:28.861889 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:28.861900 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.861903 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.867002 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:29.361911 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:29.361931 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:29.361938 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.361941 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.364744 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:29.861345 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:29.861371 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:29.861380 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.861393 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.865102 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:30.361423 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:30.361452 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:30.361461 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.361466 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.364515 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:30.365176 1434484 node_ready.go:53] node "ha-334765-m04" has status "Ready":"Unknown"
	I0916 10:59:30.861997 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:30.862034 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:30.862043 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.862049 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.865128 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:31.361272 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:31.361296 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:31.361306 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.361311 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.364160 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:31.861272 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:31.861295 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:31.861304 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.861311 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.864252 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.361955 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:32.361977 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.361987 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.361992 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.364874 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.365799 1434484 node_ready.go:49] node "ha-334765-m04" has status "Ready":"True"
	I0916 10:59:32.365826 1434484 node_ready.go:38] duration metric: took 6.504801427s for node "ha-334765-m04" to be "Ready" ...
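
Note: the repeated GET /api/v1/nodes/ha-334765-m04 requests above are node_ready.go polling until the node's Ready condition is True. A minimal sketch of such a loop using client-go, assuming a hypothetical function name (waitNodeReady) and an interval matching the ~500ms cadence visible in the timestamps; minikube's real implementation differs in detail:

package nodes

import (
    "context"
    "time"

    corev1 "k8s.io/api/core/v1"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/apimachinery/pkg/util/wait"
    "k8s.io/client-go/kubernetes"
)

// waitNodeReady polls the node object until its NodeReady condition reports
// True (the "Ready":"Unknown" -> "Ready":"True" transition in the log) or the
// timeout elapses. PollUntilContextTimeout needs a recent k8s.io/apimachinery.
func waitNodeReady(cs kubernetes.Interface, name string, timeout time.Duration) error {
    return wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, timeout, true,
        func(ctx context.Context) (bool, error) {
            node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
            if err != nil {
                return false, nil // treat API errors as transient and keep polling
            }
            for _, c := range node.Status.Conditions {
                if c.Type == corev1.NodeReady {
                    return c.Status == corev1.ConditionTrue, nil
                }
            }
            return false, nil
        })
}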
	I0916 10:59:32.365836 1434484 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:59:32.365928 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:32.365942 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.365965 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.365981 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.373805 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:32.402987 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.403097 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 10:59:32.403110 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.403119 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.403123 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.406511 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.407128 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:32.407144 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.407162 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.407166 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.410911 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.411874 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:32.411895 1434484 pod_ready.go:82] duration metric: took 8.870242ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.411909 1434484 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.411977 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 10:59:32.411987 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.411995 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.411999 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.414541 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.415566 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:32.415582 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.415592 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.415597 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.419285 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.420274 1434484 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:32.420297 1434484 pod_ready.go:82] duration metric: took 8.376962ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.420310 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.420375 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 10:59:32.420385 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.420394 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.420398 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.423233 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.424215 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:32.424232 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.424242 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.424247 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.427673 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.428730 1434484 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:32.428748 1434484 pod_ready.go:82] duration metric: took 8.4315ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.428761 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.428828 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 10:59:32.428841 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.428849 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.428853 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.431855 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.432901 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:32.432923 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.432932 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.432939 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.437337 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:32.438403 1434484 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:32.438428 1434484 pod_ready.go:82] duration metric: took 9.659309ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.438438 1434484 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.562765 1434484 request.go:632] Waited for 124.240319ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:32.562833 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 10:59:32.562842 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.562851 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.562860 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.565735 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.762596 1434484 request.go:632] Waited for 196.202196ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:32.762668 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:32.762709 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.762735 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.762772 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.766036 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.766729 1434484 pod_ready.go:93] pod "etcd-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:32.766772 1434484 pod_ready.go:82] duration metric: took 328.325811ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
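
Note: the "Waited for ... due to client-side throttling, not priority and fairness" entries come from client-go's built-in client-side rate limiter. With QPS and Burst left at 0 in the rest.Config dumped above, client-go falls back to its defaults of 5 QPS with a burst of 10, so bursts of pod+node GETs queue up. Raising the limits is the usual way to quiet these waits in tooling; the values below are illustrative, not minikube's:

package main

import (
    "log"

    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    // kubeconfig path taken from the log above.
    cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
    if err != nil {
        log.Fatal(err)
    }
    // QPS/Burst of 0 mean "use client-go defaults" (5 QPS / burst 10);
    // higher values reduce the client-side throttling waits.
    cfg.QPS = 50
    cfg.Burst = 100
    if _, err := kubernetes.NewForConfig(cfg); err != nil {
        log.Fatal(err)
    }
}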
	I0916 10:59:32.766828 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:32.962937 1434484 request.go:632] Waited for 196.000322ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:59:32.963021 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 10:59:32.963029 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:32.963038 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.963041 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.966226 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:33.162318 1434484 request.go:632] Waited for 195.324818ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:33.162431 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:33.162444 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:33.162454 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.162459 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.165337 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:33.165942 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:33.165964 1434484 pod_ready.go:82] duration metric: took 399.117911ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:33.165994 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:33.362926 1434484 request.go:632] Waited for 196.831595ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:59:33.362996 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 10:59:33.363005 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:33.363013 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.363021 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.366140 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:33.562325 1434484 request.go:632] Waited for 195.341376ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:33.562399 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:33.562409 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:33.562421 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.562426 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.565398 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:33.566037 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:33.566060 1434484 pod_ready.go:82] duration metric: took 400.050836ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:33.566072 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:33.762502 1434484 request.go:632] Waited for 196.338019ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:59:33.762569 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 10:59:33.762578 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:33.762592 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.762599 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.765579 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:33.962487 1434484 request.go:632] Waited for 196.131345ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:33.962578 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:33.962597 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:33.962607 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.962615 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.967292 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:33.967875 1434484 pod_ready.go:93] pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:33.967896 1434484 pod_ready.go:82] duration metric: took 401.816403ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:33.967907 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:34.162300 1434484 request.go:632] Waited for 194.325968ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:59:34.162373 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 10:59:34.162390 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:34.162435 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.162445 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.165614 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:34.362784 1434484 request.go:632] Waited for 196.356243ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:34.362844 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:34.362850 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:34.362859 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.362870 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.365995 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:34.366767 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:34.366790 1434484 pod_ready.go:82] duration metric: took 398.875169ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:34.366803 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:34.562636 1434484 request.go:632] Waited for 195.745099ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:59:34.562705 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 10:59:34.562711 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:34.562720 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.562726 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.565888 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:34.761964 1434484 request.go:632] Waited for 195.242507ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:34.762089 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:34.762107 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:34.762116 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.762120 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.767751 1434484 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:34.768774 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:34.768800 1434484 pod_ready.go:82] duration metric: took 401.989675ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:34.768812 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:34.962270 1434484 request.go:632] Waited for 193.350042ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:59:34.962346 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 10:59:34.962355 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:34.962365 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.962372 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.965518 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.162471 1434484 request.go:632] Waited for 196.171238ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:35.162593 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:35.162607 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:35.162618 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.162623 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.165736 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.166616 1434484 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:35.166639 1434484 pod_ready.go:82] duration metric: took 397.817005ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:35.166651 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:35.362921 1434484 request.go:632] Waited for 196.195918ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:59:35.362999 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 10:59:35.363005 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:35.363014 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.363024 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.366010 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:35.562974 1434484 request.go:632] Waited for 196.145663ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:35.563061 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:35.563073 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:35.563084 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.563094 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.566263 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.567842 1434484 pod_ready.go:93] pod "kube-proxy-4vsvh" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:35.567869 1434484 pod_ready.go:82] duration metric: took 401.209584ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:35.567882 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:35.762261 1434484 request.go:632] Waited for 194.261813ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:35.762331 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:35.762344 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:35.762353 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.762366 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.765925 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.962995 1434484 request.go:632] Waited for 196.361313ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:35.963096 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:35.963110 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:35.963120 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.963128 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.965864 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:36.161980 1434484 request.go:632] Waited for 93.18251ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:36.162062 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:36.162074 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:36.162083 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.162090 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.165233 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:36.362185 1434484 request.go:632] Waited for 196.254133ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:36.362241 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:36.362248 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:36.362275 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.362283 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.365335 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:36.568717 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:36.568743 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:36.568751 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.568756 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.571668 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:36.762818 1434484 request.go:632] Waited for 190.338492ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:36.762896 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:36.762906 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:36.762914 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.762919 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.766593 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:37.068823 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:37.068848 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:37.068858 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.068864 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.072142 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:37.162557 1434484 request.go:632] Waited for 89.211238ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:37.162635 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:37.162650 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:37.162659 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.162668 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.165757 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:37.568294 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:37.568319 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:37.568329 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.568334 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.571196 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:37.572109 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:37.572130 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:37.572139 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.572145 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.574659 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:37.575440 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
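
Note: each pod_ready.go iteration above pairs a GET of the pod with a GET of its node; the logged "Ready":"True"/"False" strings reflect the pod's PodReady condition. A minimal sketch of that check, assuming a hypothetical helper name (isPodReady):

package pods

import (
    corev1 "k8s.io/api/core/v1"
)

// isPodReady reports whether the pod's PodReady condition is True --
// the value behind the "Ready":"False" lines for kube-proxy-br496 above.
func isPodReady(pod *corev1.Pod) bool {
    for _, c := range pod.Status.Conditions {
        if c.Type == corev1.PodReady {
            return c.Status == corev1.ConditionTrue
        }
    }
    return false
}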
	I0916 10:59:38.068480 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:38.068551 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:38.068562 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.068566 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.071574 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:38.072467 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:38.072486 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:38.072496 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.072501 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.075268 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:38.568172 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:38.568197 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:38.568208 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.568213 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.571292 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:38.572429 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:38.572454 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:38.572494 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.572501 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.575959 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.068240 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:39.068264 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:39.068274 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.068280 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.071289 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:39.072197 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:39.072219 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:39.072229 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.072233 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.075276 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.568144 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:39.568170 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:39.568179 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.568185 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.571301 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.572049 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:39.572069 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:39.572079 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.572084 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.574716 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:40.068893 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:40.068917 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:40.068927 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.068933 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.072287 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:40.073275 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:40.073298 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:40.073309 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.073315 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.076091 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:40.076834 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:40.568521 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:40.568544 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:40.568554 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.568557 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.571706 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:40.572615 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:40.572639 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:40.572657 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.572661 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.576008 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:41.068799 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:41.068823 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:41.068832 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.068837 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.071702 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:41.073293 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:41.073313 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:41.073321 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.073327 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.075839 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:41.568894 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:41.568964 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:41.568978 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.568987 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.571858 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:41.572670 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:41.572722 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:41.572732 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.572736 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.575078 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:42.068802 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:42.068832 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:42.068847 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.068853 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.072449 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:42.073519 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:42.073542 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:42.073552 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.073557 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.077092 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:42.077974 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:42.568741 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:42.568764 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:42.568775 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.568781 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.571887 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:42.572728 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:42.572745 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:42.572753 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.572758 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.577614 1434484 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:43.068938 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:43.069004 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:43.069019 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.069029 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.072179 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.073078 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:43.073099 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:43.073108 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.073115 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.076611 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.568078 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:43.568101 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:43.568110 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.568115 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.571019 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:43.571904 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:43.571924 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:43.571934 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.571939 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.574631 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.068157 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:44.068183 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:44.068192 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.068197 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.071091 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.071883 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:44.071902 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:44.071913 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.071917 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.074446 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.568093 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:44.568116 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:44.568126 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.568131 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.571057 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.571925 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:44.571945 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:44.571954 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.571960 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.574355 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.574897 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:45.068237 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:45.068275 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:45.068286 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.068291 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.074665 1434484 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:59:45.076047 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:45.076072 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:45.076082 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.076086 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.079529 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:45.568160 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:45.568186 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:45.568196 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.568201 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.571276 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:45.571992 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:45.572012 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:45.572022 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.572028 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.574746 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:46.069013 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:46.069048 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:46.069058 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.069062 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.072432 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.073252 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:46.073274 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:46.073285 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.073289 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.076217 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:46.568154 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:46.568177 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:46.568187 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.568192 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.571174 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:46.571931 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:46.571952 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:46.571962 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.571966 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.574562 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:46.575138 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:47.068907 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:47.068934 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:47.068943 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.068950 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.071909 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:47.072899 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:47.072920 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:47.072929 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.072934 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.076622 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:47.568652 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:47.568710 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:47.568720 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.568727 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.576388 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:47.577599 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:47.577626 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:47.577635 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.577641 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.580511 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:48.068375 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:48.068420 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:48.068430 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.068434 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.072204 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:48.073201 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:48.073223 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:48.073232 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.073238 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.076782 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:48.568264 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:48.568285 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:48.568294 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.568299 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.589445 1434484 round_trippers.go:574] Response Status: 200 OK in 21 milliseconds
	I0916 10:59:48.593979 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:48.594000 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:48.594010 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.594019 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.601846 1434484 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:48.603222 1434484 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:49.068975 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 10:59:49.068999 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.069008 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.069014 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.072023 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.073671 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 10:59:49.073695 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.073704 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.073710 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.076524 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.077112 1434484 pod_ready.go:93] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.077136 1434484 pod_ready.go:82] duration metric: took 13.509216981s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.077148 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.077212 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 10:59:49.077225 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.077234 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.077239 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.080165 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.080932 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:49.080979 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.081002 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.081013 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.083824 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.084494 1434484 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.084517 1434484 pod_ready.go:82] duration metric: took 7.362235ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.084530 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.084604 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 10:59:49.084613 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.084621 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.084626 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.087468 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.088207 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:49.088225 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.088234 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.088241 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.090903 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.091737 1434484 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.091762 1434484 pod_ready.go:82] duration metric: took 7.220134ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.091773 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.091840 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 10:59:49.091851 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.091860 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.091865 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.094500 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.095129 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 10:59:49.095146 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.095154 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.095158 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.097867 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.098409 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.098429 1434484 pod_ready.go:82] duration metric: took 6.648415ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.098441 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.098512 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 10:59:49.098522 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.098531 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.098535 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.101145 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.101805 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 10:59:49.101830 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.101838 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.101841 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.104577 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.105383 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.105404 1434484 pod_ready.go:82] duration metric: took 6.94974ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.105416 1434484 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.269846 1434484 request.go:632] Waited for 164.359282ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:59:49.269927 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 10:59:49.269947 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.269957 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.269966 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.272995 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:49.469820 1434484 request.go:632] Waited for 196.239692ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:49.469924 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 10:59:49.469944 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.469981 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.470007 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.472987 1434484 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:49.474313 1434484 pod_ready.go:93] pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:49.474374 1434484 pod_ready.go:82] duration metric: took 368.948795ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:49.474404 1434484 pod_ready.go:39] duration metric: took 17.108556042s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
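
The 13.5s wait on kube-proxy-br496 above is minikube's pod_ready helper re-issuing paired GETs (the Pod, then its Node) roughly every 500ms until the Pod's Ready condition turns True. A minimal sketch of that polling pattern with client-go follows; the 500ms cadence and 6m budget mirror the log, but the function and package names are illustrative, not minikube's actual implementation:

    package readiness

    import (
    	"context"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/apimachinery/pkg/util/wait"
    	"k8s.io/client-go/kubernetes"
    )

    // waitPodReady polls a Pod every 500ms (the cadence visible in the log
    // above) until its Ready condition is True or the 6m budget expires.
    func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
    	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
    		func(ctx context.Context) (bool, error) {
    			pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
    			if err != nil {
    				return false, nil // treat transient API errors as "not ready yet"
    			}
    			for _, c := range pod.Status.Conditions {
    				if c.Type == corev1.PodReady {
    					return c.Status == corev1.ConditionTrue, nil
    				}
    			}
    			return false, nil
    		})
    }
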
	I0916 10:59:49.474447 1434484 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:59:49.474527 1434484 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:59:49.487846 1434484 system_svc.go:56] duration metric: took 13.390284ms WaitForService to wait for kubelet
	I0916 10:59:49.487928 1434484 kubeadm.go:582] duration metric: took 23.756143036s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:59:49.487961 1434484 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:59:49.669544 1434484 request.go:632] Waited for 181.463458ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:49.669607 1434484 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:49.669613 1434484 round_trippers.go:469] Request Headers:
	I0916 10:59:49.669626 1434484 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:49.669664 1434484 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:49.673454 1434484 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:49.675058 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:49.675089 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:49.675101 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:49.675106 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:49.675110 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:49.675114 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:49.675118 1434484 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:49.675123 1434484 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:49.675128 1434484 node_conditions.go:105] duration metric: took 187.150823ms to run NodePressure ...
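
The "Waited ... due to client-side throttling, not priority and fairness" lines above come from client-go's default client-side rate limiter (QPS 5, burst 10), which delays requests locally before they ever reach the API server's Priority and Fairness machinery. The knobs live on rest.Config; a hedged sketch, where kubeconfigPath and the raised values are illustrative:

    package clientcfg

    import (
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // newClient raises the client-side rate limits that produce the
    // "Waited ... due to client-side throttling" messages in the log.
    // client-go's defaults are QPS=5, Burst=10; the values here are
    // illustrative, not a recommendation.
    func newClient(kubeconfigPath string) (*kubernetes.Clientset, *rest.Config, error) {
    	cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfigPath)
    	if err != nil {
    		return nil, nil, err
    	}
    	cfg.QPS = 50    // sustained requests per second before local queuing
    	cfg.Burst = 100 // short bursts allowed above QPS
    	cs, err := kubernetes.NewForConfig(cfg)
    	return cs, cfg, err
    }
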
	I0916 10:59:49.675140 1434484 start.go:241] waiting for startup goroutines ...
	I0916 10:59:49.675173 1434484 start.go:255] writing updated cluster config ...
	I0916 10:59:49.675521 1434484 ssh_runner.go:195] Run: rm -f paused
	I0916 10:59:49.684916 1434484 out.go:177] * Done! kubectl is now configured to use "ha-334765" cluster and "default" namespace by default
	E0916 10:59:49.687664 1434484 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
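
The final E-line is the same failure that sinks every kubectl-based assertion in this report: fork/exec returning "exec format error" on an arm64 host almost always means the binary at /usr/local/bin/kubectl was built for a different architecture (for example amd64). A quick, purely illustrative ELF check in Go:

    package main

    import (
    	"debug/elf"
    	"fmt"
    	"log"
    	"runtime"
    )

    func main() {
    	// Path taken from the failure above; everything else is illustrative.
    	const path = "/usr/local/bin/kubectl"
    	f, err := elf.Open(path)
    	if err != nil {
    		log.Fatalf("cannot parse %s as ELF: %v", path, err)
    	}
    	defer f.Close()
    	fmt.Printf("binary built for %v, host is %s\n", f.Machine, runtime.GOARCH)
    	if f.Machine != elf.EM_AARCH64 {
    		fmt.Println("architecture mismatch: this would reproduce the exec format error")
    	}
    }
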
	
	
	==> CRI-O <==
	Sep 16 10:58:41 ha-334765 crio[626]: time="2024-09-16 10:58:41.410117973Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 10:58:41 ha-334765 crio[626]: time="2024-09-16 10:58:41.445324165Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 10:58:41 ha-334765 crio[626]: time="2024-09-16 10:58:41.445381534Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.158159426Z" level=info msg="Running pod sandbox: default/busybox-7dff88458-55czh/POD" id=5dfd6353-98d4-4fe1-8c24-1d09dacba28a name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.158229471Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.204909122Z" level=info msg="Got pod network &{Name:busybox-7dff88458-55czh Namespace:default ID:e40ab8e1a2db01c8b050915b90b521ea492459af43f529c9ba6a0839087913e9 UID:36f5a6dc-dea0-4fc2-a9d1-aa474fb384f2 NetNS:/var/run/netns/70dd29fc-0fa0-4853-9ee1-1f2b0797d542 Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.204950237Z" level=info msg="Adding pod default_busybox-7dff88458-55czh to CNI network \"kindnet\" (type=ptp)"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.262471715Z" level=info msg="Got pod network &{Name:busybox-7dff88458-55czh Namespace:default ID:e40ab8e1a2db01c8b050915b90b521ea492459af43f529c9ba6a0839087913e9 UID:36f5a6dc-dea0-4fc2-a9d1-aa474fb384f2 NetNS:/var/run/netns/70dd29fc-0fa0-4853-9ee1-1f2b0797d542 Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.262618427Z" level=info msg="Checking pod default_busybox-7dff88458-55czh for CNI network kindnet (type=ptp)"
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.275362581Z" level=info msg="Ran pod sandbox e40ab8e1a2db01c8b050915b90b521ea492459af43f529c9ba6a0839087913e9 with infra container: default/busybox-7dff88458-55czh/POD" id=5dfd6353-98d4-4fe1-8c24-1d09dacba28a name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.276812924Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=efad7fc5-46c9-46a7-bcca-6d6f488a490e name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.277220077Z" level=info msg="Image gcr.io/k8s-minikube/busybox:1.28 not found" id=efad7fc5-46c9-46a7-bcca-6d6f488a490e name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.280216974Z" level=info msg="Pulling image: gcr.io/k8s-minikube/busybox:1.28" id=d7fb74de-2019-4c67-b39b-4355981e4ba5 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:59:51 ha-334765 crio[626]: time="2024-09-16 10:59:51.282349629Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 10:59:52 ha-334765 crio[626]: time="2024-09-16 10:59:52.251001103Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.318021393Z" level=info msg="Pulled image: gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3" id=d7fb74de-2019-4c67-b39b-4355981e4ba5 name=/runtime.v1.ImageService/PullImage
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.320470029Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=6eebc36f-9916-46cb-b57e-8813a2923e42 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.321707569Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=6eebc36f-9916-46cb-b57e-8813a2923e42 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.324960566Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=741446b3-64ca-4706-80f9-f6f73ad1f691 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.326797385Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=741446b3-64ca-4706-80f9-f6f73ad1f691 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.330258016Z" level=info msg="Creating container: default/busybox-7dff88458-55czh/busybox" id=225447af-4e5c-4b56-b382-4c6afb3d9801 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.330399108Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.417704630Z" level=info msg="Created container 5285c2afb84aff8332a80d659714abccfb43b347048d0d8d20e028c7b8bf182a: default/busybox-7dff88458-55czh/busybox" id=225447af-4e5c-4b56-b382-4c6afb3d9801 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.420747573Z" level=info msg="Starting container: 5285c2afb84aff8332a80d659714abccfb43b347048d0d8d20e028c7b8bf182a" id=8bfea403-df96-4bdb-a319-9588c64535e0 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 10:59:54 ha-334765 crio[626]: time="2024-09-16 10:59:54.435080495Z" level=info msg="Started container" PID=1995 containerID=5285c2afb84aff8332a80d659714abccfb43b347048d0d8d20e028c7b8bf182a description=default/busybox-7dff88458-55czh/busybox id=8bfea403-df96-4bdb-a319-9588c64535e0 name=/runtime.v1.RuntimeService/StartContainer sandboxID=e40ab8e1a2db01c8b050915b90b521ea492459af43f529c9ba6a0839087913e9
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	5285c2afb84af       gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3   10 seconds ago       Running             busybox                   0                   e40ab8e1a2db0       busybox-7dff88458-55czh
	607b372ab0497       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                      About a minute ago   Running             kube-controller-manager   4                   4c19411f3288b       kube-controller-manager-ha-334765
	5d869ed2eff63       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                      About a minute ago   Running             storage-provisioner       2                   fa2370457873b       storage-provisioner
	ea8007bbeb0e0       7e2a4e229620ba3a757dc3699d10e8f77c453b7ee71936521668dec51669679d                                      About a minute ago   Running             kube-vip                  1                   44eee6ac52f4f       kube-vip-ha-334765
	5c412d80a4eca       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                      About a minute ago   Running             kube-apiserver            2                   0744c12f665db       kube-apiserver-ha-334765
	ff866dd3e2e0e       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                      2 minutes ago        Running             coredns                   1                   42b5dd306e83b       coredns-7c65d6cfc9-s9fp9
	0803fd3b6cb1a       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                      2 minutes ago        Exited              storage-provisioner       1                   fa2370457873b       storage-provisioner
	09095ec87adcc       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                      2 minutes ago        Running             coredns                   1                   30b34aa944c76       coredns-7c65d6cfc9-q5xr7
	61cf908391884       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                      2 minutes ago        Running             kindnet-cni               1                   9ba992f0b1115       kindnet-7s5t5
	6245d19c5d7d7       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                      2 minutes ago        Running             kube-proxy                1                   880a68230ddc7       kube-proxy-tlfs7
	f5db3144aa51b       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                      2 minutes ago        Exited              kube-controller-manager   3                   4c19411f3288b       kube-controller-manager-ha-334765
	91965bef6661a       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                      2 minutes ago        Exited              kube-apiserver            1                   0744c12f665db       kube-apiserver-ha-334765
	8135d2bac513b       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                      2 minutes ago        Running             etcd                      1                   d8ab7a582bfe1       etcd-ha-334765
	b7e24d95fd64d       7e2a4e229620ba3a757dc3699d10e8f77c453b7ee71936521668dec51669679d                                      2 minutes ago        Exited              kube-vip                  0                   44eee6ac52f4f       kube-vip-ha-334765
	15e97790478eb       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                      2 minutes ago        Running             kube-scheduler            1                   f7f22bf75d68a       kube-scheduler-ha-334765
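
In the table above, ATTEMPT is CRI-O's per-container restart counter: kube-controller-manager is on attempt 4 (attempt 3 Exited), kube-apiserver on attempt 2, consistent with the control-plane restarts this HA test performs. Roughly the same information is exposed through the API as ContainerStatus.RestartCount; a minimal sketch, assuming cs is an initialized kubernetes.Interface, ctx a context.Context, and the usual metav1/fmt/log imports:

    // Prints per-container restart counts for kube-system — the API-side
    // view of the ATTEMPT column above.
    pods, err := cs.CoreV1().Pods("kube-system").List(ctx, metav1.ListOptions{})
    if err != nil {
    	log.Fatal(err)
    }
    for _, p := range pods.Items {
    	for _, st := range p.Status.ContainerStatuses {
    		fmt.Printf("%s/%s restarts=%d\n", p.Name, st.Name, st.RestartCount)
    	}
    }
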
	
	
	==> coredns [09095ec87adcc15a9612bfce7a11216ad877cfe524cddeffed33dbf5e0ed5a39] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49858 - 28049 "HINFO IN 2972457188676485689.4820616713979779479. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.041126994s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1683974802]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:01.012) (total time: 30001ms):
	Trace[1683974802]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:58:31.013)
	Trace[1683974802]: [30.001438155s] [30.001438155s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[672868676]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:01.013) (total time: 30001ms):
	Trace[672868676]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:58:31.014)
	Trace[672868676]: [30.001108251s] [30.001108251s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[832753644]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:01.013) (total time: 30001ms):
	Trace[832753644]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:58:31.014)
	Trace[832753644]: [30.001211264s] [30.001211264s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [ff866dd3e2e0e3048d32e7d0e7ce2d4ae0a9cebc5e0057920932cc79d8220c72] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:33467 - 21803 "HINFO IN 1424870945100461640.1111161138070486558. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.027725084s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[211534412]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:00.977) (total time: 30002ms):
	Trace[211534412]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:58:30.978)
	Trace[211534412]: [30.002853889s] [30.002853889s] END
	[INFO] plugin/kubernetes: Trace[2071306121]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:00.977) (total time: 30003ms):
	Trace[2071306121]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:58:30.978)
	Trace[2071306121]: [30.003675686s] [30.003675686s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[625991357]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:58:00.977) (total time: 30005ms):
	Trace[625991357]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:58:30.978)
	Trace[625991357]: [30.005009708s] [30.005009708s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
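
Both coredns replicas show the same failure mode: for the ~30s after they started (10:58:01 to 10:58:31), every list/watch against the in-cluster Service VIP https://10.96.0.1:443 timed out, most likely because the apiserver behind that VIP was itself still restarting (the container status above shows kube-apiserver on its second attempt); the errors stop once it came back. A trivial reachability probe for that VIP, purely illustrative:

    package main

    import (
    	"fmt"
    	"net"
    	"time"
    )

    func main() {
    	// The Service VIP coredns was dialing in the traces above.
    	conn, err := net.DialTimeout("tcp", "10.96.0.1:443", 2*time.Second)
    	if err != nil {
    		fmt.Println("unreachable:", err) // matches the i/o timeouts in the log
    		return
    	}
    	conn.Close()
    	fmt.Println("apiserver VIP reachable")
    }
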
	
	
	==> describe nodes <==
	Name:               ha-334765
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:00:00 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:57:47 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:57:47 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:57:47 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:57:47 +0000   Mon, 16 Sep 2024 10:52:40 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-334765
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 57d26eafa289416a8aff51483f09616d
	  System UUID:                15c23ccf-7aa3-4a1a-8aeb-2a833bffb1e5
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-55czh              0 (0%)        0 (0%)      0 (0%)           0 (0%)         15s
	  kube-system                 coredns-7c65d6cfc9-q5xr7             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     8m7s
	  kube-system                 coredns-7c65d6cfc9-s9fp9             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     8m7s
	  kube-system                 etcd-ha-334765                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         8m10s
	  kube-system                 kindnet-7s5t5                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      8m7s
	  kube-system                 kube-apiserver-ha-334765             250m (12%)    0 (0%)      0 (0%)           0 (0%)         8m10s
	  kube-system                 kube-controller-manager-ha-334765    200m (10%)    0 (0%)      0 (0%)           0 (0%)         8m10s
	  kube-system                 kube-proxy-tlfs7                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m7s
	  kube-system                 kube-scheduler-ha-334765             100m (5%)     0 (0%)      0 (0%)           0 (0%)         8m10s
	  kube-system                 kube-vip-ha-334765                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m5s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m5s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 2m3s                   kube-proxy       
	  Normal   Starting                 8m5s                   kube-proxy       
	  Normal   Starting                 8m20s                  kubelet          Starting kubelet.
	  Normal   NodeHasSufficientPID     8m20s (x7 over 8m20s)  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    8m20s (x8 over 8m20s)  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  8m20s (x8 over 8m20s)  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 8m20s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasNoDiskPressure    8m10s                  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  8m10s                  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasSufficientPID     8m10s                  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   Starting                 8m10s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 8m10s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           8m8s                   node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           7m36s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   NodeReady                7m25s                  kubelet          Node ha-334765 status is now: NodeReady
	  Normal   RegisteredNode           6m28s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           3m22s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   Starting                 2m55s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m55s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m55s (x8 over 2m55s)  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m55s (x8 over 2m55s)  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m55s (x7 over 2m55s)  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m13s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           80s                    node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           61s                    node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
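
The percentages in the Allocated resources table above are each request (or limit) divided by the node's allocatable amount, rounded down. Worked through for ha-334765:

    cpu requests:    950m / 2000m            = 47.5%  -> shown as 47%
    memory requests: 290Mi = 296960Ki; 296960Ki / 8022304Ki ~= 3.7% -> shown as 3%
    memory limits:   390Mi = 399360Ki; 399360Ki / 8022304Ki ~= 5.0% -> shown as 4%
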
	
	
	Name:               ha-334765-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:19 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:00:02 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:57:50 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:57:50 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:57:50 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:57:50 +0000   Mon, 16 Sep 2024 10:53:02 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-334765-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 5876247a4ecb4e40a30746fe5b5be162
	  System UUID:                aea91ea0-3fb3-4815-9747-a2bcb9506f24
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-tczms                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m48s
	  kube-system                 etcd-ha-334765-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         7m45s
	  kube-system                 kindnet-vj27j                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m46s
	  kube-system                 kube-apiserver-ha-334765-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         7m45s
	  kube-system                 kube-controller-manager-ha-334765-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         7m45s
	  kube-system                 kube-proxy-l998t                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m46s
	  kube-system                 kube-scheduler-ha-334765-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         7m45s
	  kube-system                 kube-vip-ha-334765-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m42s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 118s                   kube-proxy       
	  Normal   Starting                 7m38s                  kube-proxy       
	  Normal   NodeHasNoDiskPressure    7m46s (x8 over 7m46s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     7m46s (x7 over 7m46s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  7m46s (x8 over 7m46s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           7m43s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           7m36s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           6m28s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   NodeHasSufficientPID     3m55s (x7 over 3m55s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    3m55s (x8 over 3m55s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   Starting                 3m55s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m55s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m55s (x8 over 3m55s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           3m22s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   NodeHasSufficientMemory  2m53s (x9 over 2m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   Starting                 2m53s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m53s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasNoDiskPressure    2m53s (x7 over 2m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m53s (x7 over 2m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m13s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           80s                    node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           61s                    node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	
	
	Name:               ha-334765-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_43_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:00:02 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:59:32 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:59:32 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:59:32 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:59:32 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-334765-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 dd746b13e222435683cb98b0069c4dec
	  System UUID:                2ce236e7-eff0-4b96-a330-3e2c709a50e7
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-2n2c7    0 (0%)        0 (0%)      0 (0%)           0 (0%)         15s
	  kube-system                 kindnet-plxdg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m23s
	  kube-system                 kube-proxy-br496           0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m23s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 5m20s                  kube-proxy       
	  Normal   Starting                 17s                    kube-proxy       
	  Normal   RegisteredNode           5m23s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Warning  CgroupV1                 5m23s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  5m23s (x2 over 5m23s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    5m23s (x2 over 5m23s)  kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m23s (x2 over 5m23s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           5m20s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           5m18s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeReady                4m40s                  kubelet          Node ha-334765-m04 status is now: NodeReady
	  Normal   RegisteredNode           3m22s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           2m13s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeNotReady             93s                    node-controller  Node ha-334765-m04 status is now: NodeNotReady
	  Normal   RegisteredNode           80s                    node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           61s                    node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   Starting                 46s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 46s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     40s (x7 over 46s)      kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  33s (x8 over 46s)      kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    33s (x8 over 46s)      kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [8135d2bac513b0664ddcaf33dae9f54766148674f3a57b6f028c7393059a224b] <==
	{"level":"warn","ts":"2024-09-16T10:59:54.563636Z","caller":"embed/config_logging.go:170","msg":"rejected connection on client endpoint","remote-addr":"192.168.49.4:44048","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T10:59:54.717991Z","caller":"embed/config_logging.go:170","msg":"rejected connection on client endpoint","remote-addr":"192.168.49.4:44066","server-name":"","error":"EOF"}
	{"level":"info","ts":"2024-09-16T10:59:54.749631Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(2225418148823231912 12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:59:54.756300Z","caller":"membership/cluster.go:472","msg":"removed member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","removed-remote-peer-id":"e131de7e5408ffcb","removed-remote-peer-urls":["https://192.168.49.4:2380"]}
	{"level":"info","ts":"2024-09-16T10:59:54.756358Z","caller":"rafthttp/peer.go:330","msg":"stopping remote peer","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756423Z","caller":"etcdserver/server.go:987","msg":"rejected Raft message from removed member","local-member-id":"aec36adc501070cc","removed-member-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756455Z","caller":"rafthttp/peer.go:180","msg":"failed to process Raft message","error":"cannot process message from removed member"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756469Z","caller":"etcdserver/server.go:987","msg":"rejected Raft message from removed member","local-member-id":"aec36adc501070cc","removed-member-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756475Z","caller":"rafthttp/peer.go:180","msg":"failed to process Raft message","error":"cannot process message from removed member"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756531Z","caller":"rafthttp/stream.go:286","msg":"closed TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:59:54.756552Z","caller":"rafthttp/stream.go:294","msg":"stopped TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.756880Z","caller":"rafthttp/stream.go:286","msg":"closed TCP streaming connection with remote peer","stream-writer-type":"stream Message","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:59:54.757153Z","caller":"rafthttp/stream.go:294","msg":"stopped TCP streaming connection with remote peer","stream-writer-type":"stream Message","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:59:54.757312Z","caller":"rafthttp/pipeline.go:85","msg":"stopped HTTP pipelining with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.757570Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb","error":"context canceled"}
	{"level":"warn","ts":"2024-09-16T10:59:54.757602Z","caller":"rafthttp/peer_status.go:66","msg":"peer became inactive (message send to peer failed)","peer-id":"e131de7e5408ffcb","error":"failed to read e131de7e5408ffcb on stream MsgApp v2 (context canceled)"}
	{"level":"info","ts":"2024-09-16T10:59:54.757629Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.757743Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb","error":"context canceled"}
	{"level":"info","ts":"2024-09-16T10:59:54.757781Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:59:54.757802Z","caller":"rafthttp/peer.go:335","msg":"stopped remote peer","remote-peer-id":"e131de7e5408ffcb"}
	{"level":"info","ts":"2024-09-16T10:59:54.757813Z","caller":"rafthttp/transport.go:355","msg":"removed remote peer","local-member-id":"aec36adc501070cc","removed-remote-peer-id":"e131de7e5408ffcb"}
	{"level":"warn","ts":"2024-09-16T10:59:54.807794Z","caller":"embed/config_logging.go:170","msg":"rejected connection on peer endpoint","remote-addr":"192.168.49.4:44886","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T10:59:54.808133Z","caller":"embed/config_logging.go:170","msg":"rejected connection on peer endpoint","remote-addr":"192.168.49.4:44888","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T11:00:05.074507Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"106.778829ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/events/\" range_end:\"/registry/events0\" limit:500 ","response":"range_response_count:489 size:359734"}
	{"level":"info","ts":"2024-09-16T11:00:05.074583Z","caller":"traceutil/trace.go:171","msg":"trace[16313580] range","detail":"{range_begin:/registry/events/; range_end:/registry/events0; response_count:489; response_revision:2704; }","duration":"106.872488ms","start":"2024-09-16T11:00:04.967690Z","end":"2024-09-16T11:00:05.074562Z","steps":["trace[16313580] 'range keys from bolt db'  (duration: 105.972924ms)"],"step_count":1}
	
	
	==> kernel <==
	 11:00:05 up 10:42,  0 users,  load average: 4.14, 3.36, 2.45
	Linux ha-334765 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [61cf9083918845e247428ff0d556956b0248ff0ad4679c01d5b3c7a48d8e449b] <==
	I0916 10:59:31.327491       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:59:31.327531       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:59:31.327549       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:59:41.328756       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:59:41.328793       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:59:41.328912       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:59:41.328927       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 10:59:41.328965       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:59:41.328977       1 main.go:299] handling current node
	I0916 10:59:41.328990       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:59:41.328997       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:59:51.356180       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:59:51.360190       1 main.go:299] handling current node
	I0916 10:59:51.360210       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:59:51.360217       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 10:59:51.360466       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:59:51.360478       1 main.go:322] Node ha-334765-m03 has CIDR [10.244.2.0/24] 
	I0916 10:59:51.360758       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:59:51.360805       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 11:00:01.517881       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 11:00:01.517915       1 main.go:299] handling current node
	I0916 11:00:01.517932       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 11:00:01.517939       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 11:00:01.518067       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 11:00:01.518083       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	
	
	==> kube-apiserver [5c412d80a4ecafb0b689ea69ef511a2f3dcdccbcf2623074f3d816f72e7a46cb] <==
	I0916 10:58:27.299338       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 10:58:27.299382       1 crdregistration_controller.go:114] Starting crd-autoregister controller
	I0916 10:58:27.299444       1 shared_informer.go:313] Waiting for caches to sync for crd-autoregister
	I0916 10:58:27.578257       1 system_namespaces_controller.go:66] Starting system namespaces controller
	I0916 10:58:27.683306       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:58:27.683428       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:58:27.685201       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:58:27.687343       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:58:27.699539       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:58:27.699828       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:58:27.699873       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:58:27.699936       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:58:27.699971       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:58:27.725540       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:58:27.731975       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:58:27.732003       1 policy_source.go:224] refreshing policies
	I0916 10:58:27.776736       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:58:27.780782       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:58:27.781442       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:58:27.782563       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:58:27.788406       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:58:28.282763       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:58:28.815543       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2 192.168.49.3]
	I0916 10:58:28.817011       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:58:28.833455       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-apiserver [91965bef6661abd48c88b0879ea77f4bbcf35a1e56545a44fd6adf3cea11eedb] <==
	W0916 10:57:44.201609       1 reflector.go:561] storage/cacher.go:/mutatingwebhookconfigurations: failed to list *admissionregistration.MutatingWebhookConfiguration: etcdserver: leader changed
	E0916 10:57:44.201625       1 cacher.go:478] cacher (mutatingwebhookconfigurations.admissionregistration.k8s.io): unexpected ListAndWatch error: failed to list *admissionregistration.MutatingWebhookConfiguration: etcdserver: leader changed; reinitializing...
	W0916 10:57:44.201635       1 reflector.go:561] storage/cacher.go:/persistentvolumes: failed to list *core.PersistentVolume: etcdserver: leader changed
	E0916 10:57:44.201642       1 cacher.go:478] cacher (persistentvolumes): unexpected ListAndWatch error: failed to list *core.PersistentVolume: etcdserver: leader changed; reinitializing...
	W0916 10:57:44.201669       1 reflector.go:561] storage/cacher.go:/replicasets: failed to list *apps.ReplicaSet: etcdserver: leader changed
	E0916 10:57:44.201681       1 cacher.go:478] cacher (replicasets.apps): unexpected ListAndWatch error: failed to list *apps.ReplicaSet: etcdserver: leader changed; reinitializing...
	I0916 10:57:44.691335       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:57:45.403462       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:57:46.081858       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:57:46.081977       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:57:46.088089       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:57:46.166694       1 cache.go:39] Caches are synced for autoregister controller
	W0916 10:57:46.390875       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.3]
	I0916 10:57:47.087956       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:57:47.087992       1 policy_source.go:224] refreshing policies
	I0916 10:57:47.097314       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:57:47.099925       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:57:47.118279       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	E0916 10:57:47.140556       1 controller.go:95] Found stale data, removed previous endpoints on kubernetes service, apiserver didn't exit successfully previously
	I0916 10:57:47.182358       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:57:47.188753       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:57:47.282575       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:57:47.283383       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:57:47.282675       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	F0916 10:58:23.682815       1 hooks.go:210] PostStartHook "start-service-ip-repair-controllers" failed: unable to perform initial IP and Port allocation check
	
	
	==> kube-controller-manager [607b372ab0497d92e58c663a102dc4a05b095310966aa5d10e40b796cc7279d0] <==
	I0916 10:59:32.393283       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m04"
	I0916 10:59:50.551508       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m03"
	I0916 10:59:50.591231       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m03"
	I0916 10:59:50.998884       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="332.720745ms"
	I0916 10:59:51.295188       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="296.175247ms"
	E0916 10:59:51.295321       1 replica_set.go:560] "Unhandled Error" err="sync \"default/busybox-7dff88458\" failed with Operation cannot be fulfilled on replicasets.apps \"busybox-7dff88458\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:59:51.296779       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="237.894µs"
	I0916 10:59:51.302603       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="683.437µs"
	I0916 10:59:52.769819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="62.529µs"
	I0916 10:59:52.856781       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="173.961µs"
	I0916 10:59:53.461534       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="61.684µs"
	I0916 10:59:53.479262       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="54.85µs"
	I0916 10:59:53.496403       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="62.784µs"
	I0916 10:59:53.505706       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="53.62µs"
	I0916 10:59:54.784274       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="107.118947ms"
	I0916 10:59:54.784534       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="127.521µs"
	I0916 10:59:55.643122       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="41.262762ms"
	I0916 10:59:55.643330       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="73.713µs"
	I0916 10:59:58.023100       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-334765-m03"
	I0916 10:59:58.023725       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-334765-m04"
	E0916 11:00:05.387869       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:00:05.387907       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:00:05.387914       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:00:05.387920       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:00:05.387926       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	
	
	==> kube-controller-manager [f5db3144aa51b172d0ec61e2be0c97a5ab84bcfa9ebb427ee7e32f02c15be571] <==
	I0916 10:57:57.825377       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:57:58.398765       1 controllermanager.go:197] "Starting" version="v1.31.1"
	I0916 10:57:58.398799       1 controllermanager.go:199] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:57:58.400788       1 dynamic_cafile_content.go:160] "Starting controller" name="request-header::/var/lib/minikube/certs/front-proxy-ca.crt"
	I0916 10:57:58.400910       1 dynamic_cafile_content.go:160] "Starting controller" name="client-ca-bundle::/var/lib/minikube/certs/ca.crt"
	I0916 10:57:58.401639       1 secure_serving.go:213] Serving securely on 127.0.0.1:10257
	I0916 10:57:58.401683       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	E0916 10:58:08.420360       1 controllermanager.go:242] "Error building controller context" err="failed to wait for apiserver being healthy: timed out waiting for the condition: failed to get apiserver /healthz status: an error on the server (\"[+]ping ok\\n[+]log ok\\n[+]etcd ok\\n[+]poststarthook/start-apiserver-admission-initializer ok\\n[+]poststarthook/generic-apiserver-start-informers ok\\n[+]poststarthook/priority-and-fairness-config-consumer ok\\n[+]poststarthook/priority-and-fairness-filter ok\\n[+]poststarthook/storage-object-count-tracker-hook ok\\n[+]poststarthook/start-apiextensions-informers ok\\n[+]poststarthook/start-apiextensions-controllers ok\\n[+]poststarthook/crd-informer-synced ok\\n[+]poststarthook/start-system-namespaces-controller ok\\n[+]poststarthook/start-cluster-authentication-info-controller ok\\n[+]poststarthook/start-kube-apiserver-identity-lease-controller ok\\n[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok\\n[+]poststarthook/start-legacy-token-tracking-controller ok\\n[-]poststarthook/start-service-ip-repair-controllers failed: reason withheld\\n[+]poststarthook/rbac/bootstrap-roles ok\\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\\n[+]poststarthook/priority-and-fairness-config-producer ok\\n[+]poststarthook/bootstrap-controller ok\\n[+]poststarthook/aggregator-reload-proxy-client-cert ok\\n[+]poststarthook/start-kube-aggregator-informers ok\\n[+]poststarthook/apiservice-status-local-available-controller ok\\n[+]poststarthook/apiservice-status-remote-available-controller ok\\n[+]poststarthook/apiservice-registration-controller ok\\n[+]poststarthook/apiservice-discovery-controller ok\\n[+]poststarthook/kube-apiserver-autoregistration ok\\n[+]autoregister-completion ok\\n[+]poststarthook/apiservice-openapi-controller ok\\n[+]poststarthook/apiservice-openapiv3-controller ok\\nhealthz check failed\") has prevented the request from succeeding"
	
	
	==> kube-proxy [6245d19c5d7d70faa57beaf7e8ecfc339febeb65a17c641e4efb5e2dd7db4d6d] <==
	I0916 10:58:01.056192       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:58:01.158521       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:58:01.158734       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:58:01.179476       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:58:01.179609       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:58:01.181970       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:58:01.185137       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:58:01.185236       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:58:01.186899       1 config.go:199] "Starting service config controller"
	I0916 10:58:01.186942       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:58:01.186965       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:58:01.186969       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:58:01.187739       1 config.go:328] "Starting node config controller"
	I0916 10:58:01.187760       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:58:01.287594       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:58:01.287675       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:58:01.287946       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [15e97790478eb76261fc55ff15792c70d89c56ee418385d7eff8af8d2c6cdac8] <==
	E0916 10:57:44.834277       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 10:58:06.512256       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:58:27.529901       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: unknown (get configmaps) - error from a previous attempt: read tcp 192.168.49.2:39336->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626101       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: unknown (get csistoragecapacities.storage.k8s.io) - error from a previous attempt: read tcp 192.168.49.2:39344->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626202       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: unknown (get persistentvolumes) - error from a previous attempt: read tcp 192.168.49.2:39340->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626107       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: unknown (get replicasets.apps) - error from a previous attempt: read tcp 192.168.49.2:39346->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626297       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: unknown (get storageclasses.storage.k8s.io) - error from a previous attempt: read tcp 192.168.49.2:39338->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626256       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: unknown (get csinodes.storage.k8s.io) - error from a previous attempt: read tcp 192.168.49.2:39432->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626452       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: unknown (get replicationcontrollers) - error from a previous attempt: read tcp 192.168.49.2:39348->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: unknown (get persistentvolumeclaims) - error from a previous attempt: read tcp 192.168.49.2:39356->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626802       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: unknown (get poddisruptionbudgets.policy) - error from a previous attempt: read tcp 192.168.49.2:39420->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.626917       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: unknown (get nodes) - error from a previous attempt: read tcp 192.168.49.2:39412->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.627013       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: unknown (get namespaces) - error from a previous attempt: read tcp 192.168.49.2:39396->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.627122       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: unknown (get services) - error from a previous attempt: read tcp 192.168.49.2:39388->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.627216       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: unknown (get statefulsets.apps) - error from a previous attempt: read tcp 192.168.49.2:39380->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.627441       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: unknown (get pods) - error from a previous attempt: read tcp 192.168.49.2:39366->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:58:27.628799       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: unknown (get csidrivers.storage.k8s.io) - error from a previous attempt: read tcp 192.168.49.2:39444->192.168.49.2:8443: read: connection reset by peer" logger="UnhandledError"
	E0916 10:59:50.760108       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-2n2c7\": pod busybox-7dff88458-2n2c7 is already assigned to node \"ha-334765-m04\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-2n2c7" node="ha-334765-m04"
	E0916 10:59:50.760173       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 3c388a1c-af25-45d7-ae39-9898dd87219c(default/busybox-7dff88458-2n2c7) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-2n2c7"
	E0916 10:59:50.760192       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-2n2c7\": pod busybox-7dff88458-2n2c7 is already assigned to node \"ha-334765-m04\"" pod="default/busybox-7dff88458-2n2c7"
	I0916 10:59:50.760213       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-2n2c7" node="ha-334765-m04"
	E0916 10:59:50.893379       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-55czh\": pod busybox-7dff88458-55czh is already assigned to node \"ha-334765\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-55czh" node="ha-334765"
	E0916 10:59:50.893856       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 36f5a6dc-dea0-4fc2-a9d1-aa474fb384f2(default/busybox-7dff88458-55czh) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-55czh"
	E0916 10:59:50.893922       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-55czh\": pod busybox-7dff88458-55czh is already assigned to node \"ha-334765\"" pod="default/busybox-7dff88458-55czh"
	I0916 10:59:50.894058       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-55czh" node="ha-334765"
	
	
	==> kubelet <==
	Sep 16 10:58:27 ha-334765 kubelet[743]: E0916 10:58:27.464314     743 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: unknown (get configmaps) - error from a previous attempt: read tcp 192.168.49.254:43656->192.168.49.254:8443: read: connection reset by peer" logger="UnhandledError"
	Sep 16 10:58:28 ha-334765 kubelet[743]: I0916 10:58:28.376180     743 scope.go:117] "RemoveContainer" containerID="b7e24d95fd64dbeef1a99721611361f8a361e91404a1ec6148bfc2d0e8719482"
	Sep 16 10:58:30 ha-334765 kubelet[743]: E0916 10:58:30.177814     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484310177179841,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:58:30 ha-334765 kubelet[743]: E0916 10:58:30.177873     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484310177179841,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:58:31 ha-334765 kubelet[743]: I0916 10:58:31.384434     743 scope.go:117] "RemoveContainer" containerID="0803fd3b6cb1ac232a6f59589c22452da8c46d391583775ed164af969d0ffc7e"
	Sep 16 10:58:40 ha-334765 kubelet[743]: I0916 10:58:40.171962     743 scope.go:117] "RemoveContainer" containerID="f5db3144aa51b172d0ec61e2be0c97a5ab84bcfa9ebb427ee7e32f02c15be571"
	Sep 16 10:58:40 ha-334765 kubelet[743]: E0916 10:58:40.180190     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484320179711929,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:58:40 ha-334765 kubelet[743]: E0916 10:58:40.180226     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484320179711929,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:58:50 ha-334765 kubelet[743]: E0916 10:58:50.183195     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484330182366675,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:58:50 ha-334765 kubelet[743]: E0916 10:58:50.183237     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484330182366675,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:00 ha-334765 kubelet[743]: E0916 10:59:00.185045     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484340184232135,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:00 ha-334765 kubelet[743]: E0916 10:59:00.193955     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484340184232135,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:10 ha-334765 kubelet[743]: E0916 10:59:10.197525     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484350197133397,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:10 ha-334765 kubelet[743]: E0916 10:59:10.197572     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484350197133397,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:20 ha-334765 kubelet[743]: E0916 10:59:20.199523     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484360199279206,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:20 ha-334765 kubelet[743]: E0916 10:59:20.199564     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484360199279206,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:30 ha-334765 kubelet[743]: E0916 10:59:30.203286     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484370203004186,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:30 ha-334765 kubelet[743]: E0916 10:59:30.203329     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484370203004186,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:40 ha-334765 kubelet[743]: E0916 10:59:40.205751     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484380205521739,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:40 ha-334765 kubelet[743]: E0916 10:59:40.205790     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484380205521739,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:50 ha-334765 kubelet[743]: E0916 10:59:50.207774     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484390207545856,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:50 ha-334765 kubelet[743]: E0916 10:59:50.208204     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484390207545856,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:137825,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 10:59:50 ha-334765 kubelet[743]: I0916 10:59:50.863294     743 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2dsp\" (UniqueName: \"kubernetes.io/projected/36f5a6dc-dea0-4fc2-a9d1-aa474fb384f2-kube-api-access-g2dsp\") pod \"busybox-7dff88458-55czh\" (UID: \"36f5a6dc-dea0-4fc2-a9d1-aa474fb384f2\") " pod="default/busybox-7dff88458-55czh"
	Sep 16 11:00:00 ha-334765 kubelet[743]: E0916 11:00:00.250819     743 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484400215167845,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:00:00 ha-334765 kubelet[743]: E0916 11:00:00.250860     743 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484400215167845,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

-- /stdout --
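The etcd log above records member e131de7e5408ffcb (the deleted secondary at 192.168.49.4) being removed from the Raft voter set. If one wanted to verify the surviving membership directly, something like the following would work, assuming etcdctl is present on the node and that the certificates live under the usual minikube path /var/lib/minikube/certs/etcd (both assumptions, not confirmed by this report):

    # List the remaining etcd members from the primary control-plane node.
    minikube ssh -p ha-334765 -- sudo ETCDCTL_API=3 etcdctl member list \
      --endpoints=https://127.0.0.1:2379 \
      --cacert=/var/lib/minikube/certs/etcd/ca.crt \
      --cert=/var/lib/minikube/certs/etcd/server.crt \
      --key=/var/lib/minikube/certs/etcd/server.key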
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-334765 -n ha-334765
helpers_test.go:261: (dbg) Run:  kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (486.388µs)
helpers_test.go:263: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/DeleteSecondaryNode (17.31s)
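Every kubectl invocation in this report fails with "fork/exec /usr/local/bin/kubectl: exec format error". On an arm64 runner this usually means the binary at /usr/local/bin/kubectl was built for a different architecture (for example amd64). A minimal sketch of how one might confirm the mismatch, assuming a standard Linux host with file(1) available (not taken from this report):

    # Compare the host architecture with the architecture of the kubectl binary.
    uname -m                      # expected: aarch64 on this arm64 runner
    file /usr/local/bin/kubectl   # an amd64 build would report "x86-64" here

    # If they disagree, fetching the matching arm64 build would resolve it;
    # the version below is chosen to match the v1.31.1 cluster in these logs.
    curl -LO "https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl"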

TestMultiControlPlane/serial/RestartCluster (90.15s)

=== RUN   TestMultiControlPlane/serial/RestartCluster
ha_test.go:560: (dbg) Run:  out/minikube-linux-arm64 start -p ha-334765 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=crio
ha_test.go:560: (dbg) Done: out/minikube-linux-arm64 start -p ha-334765 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=crio: (1m26.671833288s)
ha_test.go:566: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:584: (dbg) Run:  kubectl get nodes
ha_test.go:584: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (527.732µs)
ha_test.go:586: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/RestartCluster]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-334765
helpers_test.go:235: (dbg) docker inspect ha-334765:

-- stdout --
	[
	    {
	        "Id": "471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5",
	        "Created": "2024-09-16T10:51:30.912390622Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1445586,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:00:44.164890778Z",
	            "FinishedAt": "2024-09-16T11:00:43.144106281Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hostname",
	        "HostsPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/hosts",
	        "LogPath": "/var/lib/docker/containers/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5-json.log",
	        "Name": "/ha-334765",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "ha-334765:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-334765",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/merged",
	                "UpperDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/diff",
	                "WorkDir": "/var/lib/docker/overlay2/e8db6f6ac1e96cd2638477ad27706691a225de8009dee3e5127d903edb7d7779/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "ha-334765",
	                "Source": "/var/lib/docker/volumes/ha-334765/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-334765",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-334765",
	                "name.minikube.sigs.k8s.io": "ha-334765",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "8f7a6f1bc4a83a0b24712f7b9478abfb1002f9f1d208c4c5f37ef9a2b14bfe40",
	            "SandboxKey": "/var/run/docker/netns/8f7a6f1bc4a8",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34663"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34664"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34667"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34665"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34666"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-334765": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "a49e1846148d74f15aa5bd587e5d2d6b8a3c4246e7c45cf081cf9063a160d645",
	                    "EndpointID": "916e745b373f49908dd5a6c254237406f85ca69571d5331c1a4cd42d8ccca877",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-334765",
	                        "471d2d625f18"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
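
The inspect dump above is where the host-side port mappings come from: Docker bound the container's 22/tcp to 127.0.0.1:34663, and every later SSH step in this log dials that address. minikube reads these values with `docker container inspect -f` templates (visible further down in the log); as a minimal standalone sketch, the same lookup in Go, assuming only the JSON shape visible in the dump (the pared-down struct is illustrative, not minikube's code):

	// portfor.go - minimal sketch: decode `docker container inspect` output
	// (a JSON array like the dump above) and print the host binding for a
	// given container port. The struct keeps only the fields used here.
	package main

	import (
		"encoding/json"
		"fmt"
		"os"
		"os/exec"
	)

	type inspectEntry struct {
		NetworkSettings struct {
			Ports map[string][]struct {
				HostIp   string `json:"HostIp"`
				HostPort string `json:"HostPort"`
			} `json:"Ports"`
		} `json:"NetworkSettings"`
	}

	func main() {
		out, err := exec.Command("docker", "container", "inspect", "ha-334765").Output()
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		var entries []inspectEntry
		if err := json.Unmarshal(out, &entries); err != nil || len(entries) == 0 {
			fmt.Fprintln(os.Stderr, "no inspect data:", err)
			os.Exit(1)
		}
		// With the dump above this prints 127.0.0.1:34663.
		for _, b := range entries[0].NetworkSettings.Ports["22/tcp"] {
			fmt.Printf("%s:%s\n", b.HostIp, b.HostPort)
		}
	}
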
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-334765 -n ha-334765
helpers_test.go:244: <<< TestMultiControlPlane/serial/RestartCluster FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/RestartCluster]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 logs -n 25: (1.675735796s)
helpers_test.go:252: TestMultiControlPlane/serial/RestartCluster logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| cp      | ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m04 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp testdata/cp-test.txt                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765:/home/docker/cp-test_ha-334765-m04_ha-334765.txt                       |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765 sudo cat                                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765.txt                                 |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m02:/home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m02 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m03:/home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n                                                                 | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | ha-334765-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-334765 ssh -n ha-334765-m03 sudo cat                                          | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | /home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-334765 node stop m02 -v=7                                                     | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-334765 node start m02 -v=7                                                    | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC | 16 Sep 24 10:56 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-334765 -v=7                                                           | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC |                     |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | -p ha-334765 -v=7                                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:56 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-334765 --wait=true -v=7                                                    | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC | 16 Sep 24 10:59 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-334765                                                                | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:59 UTC |                     |
	| node    | ha-334765 node delete m03 -v=7                                                   | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 10:59 UTC | 16 Sep 24 11:00 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | ha-334765 stop -v=7                                                              | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 11:00 UTC | 16 Sep 24 11:00 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-334765 --wait=true                                                         | ha-334765 | jenkins | v1.34.0 | 16 Sep 24 11:00 UTC | 16 Sep 24 11:02 UTC |
	|         | -v=7 --alsologtostderr                                                           |           |         |         |                     |                     |
	|         | --driver=docker                                                                  |           |         |         |                     |                     |
	|         | --container-runtime=crio                                                         |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
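
The audit table is the profile's command history; its tail is the exact sequence RestartCluster exercises: delete node m03, a full `stop`, then `start --wait=true` with the docker driver and the crio runtime. As a minimal sketch of that stop/start cycle in Go (binary path, profile name, and flags copied from the table; the real harness drives this from helpers_test.go, not from code like this):

	// restart.go - minimal sketch of the stop/start cycle recorded at the
	// bottom of the audit table; illustrative, not the harness's own code.
	package main

	import (
		"log"
		"os"
		"os/exec"
	)

	func run(args ...string) {
		cmd := exec.Command("out/minikube-linux-arm64", args...)
		cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
		if err := cmd.Run(); err != nil {
			log.Fatalf("minikube %v: %v", args, err)
		}
	}

	func main() {
		run("stop", "-p", "ha-334765", "-v=7", "--alsologtostderr")
		run("start", "-p", "ha-334765", "--wait=true", "-v=7",
			"--alsologtostderr", "--driver=docker", "--container-runtime=crio")
	}
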
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:00:43
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
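
The header documents klog's line format, which every I/W line below follows: severity letter, month+day, wall time with microseconds, the process id (1445387 here), source file:line, then the message. A minimal Go sketch that splits a line on that format (the field names are mine, not klog's):

	// klogparse.go - minimal sketch: split one klog-formatted line
	// ("[IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg") into fields.
	package main

	import (
		"fmt"
		"regexp"
	)

	var klogLine = regexp.MustCompile(
		`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([^:]+:\d+)\] (.*)$`)

	func main() {
		line := "I0916 11:00:43.604244 1445387 out.go:345] Setting OutFile to fd 1 ..."
		m := klogLine.FindStringSubmatch(line)
		if m == nil {
			fmt.Println("not a klog line")
			return
		}
		fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s msg=%q\n",
			m[1], m[2], m[3], m[4], m[5], m[6])
	}
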
	I0916 11:00:43.604244 1445387 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:00:43.604479 1445387 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:00:43.604502 1445387 out.go:358] Setting ErrFile to fd 2...
	I0916 11:00:43.604534 1445387 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:00:43.604840 1445387 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:00:43.605294 1445387 out.go:352] Setting JSON to false
	I0916 11:00:43.606260 1445387 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38589,"bootTime":1726445855,"procs":156,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:00:43.606370 1445387 start.go:139] virtualization:  
	I0916 11:00:43.609874 1445387 out.go:177] * [ha-334765] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:00:43.613450 1445387 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:00:43.613581 1445387 notify.go:220] Checking for updates...
	I0916 11:00:43.619077 1445387 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:00:43.621879 1445387 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:00:43.624436 1445387 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:00:43.627114 1445387 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:00:43.629710 1445387 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:00:43.633027 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:43.633544 1445387 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:00:43.666437 1445387 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:00:43.666606 1445387 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:00:43.724281 1445387 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:3 ContainersRunning:0 ContainersPaused:0 ContainersStopped:3 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:38 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:00:43.714118646 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:00:43.724396 1445387 docker.go:318] overlay module found
	I0916 11:00:43.728957 1445387 out.go:177] * Using the docker driver based on existing profile
	I0916 11:00:43.731810 1445387 start.go:297] selected driver: docker
	I0916 11:00:43.731834 1445387 start.go:901] validating driver "docker" against &{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName
:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kub
evirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: S
ocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:00:43.731996 1445387 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:00:43.732120 1445387 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:00:43.795207 1445387 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:3 ContainersRunning:0 ContainersPaused:0 ContainersStopped:3 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:38 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:00:43.786074821 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
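
Both info dumps above come from the same probe: minikube runs `docker system info --format "{{json .}}"` and decodes the JSON to validate the existing docker driver (overlay2, 2 CPUs, ~8.2 GB of memory, server 27.2.1 on this host). A minimal Go sketch of that probe, keeping only a few of the fields visible in the dump (the struct is pared down for illustration, not minikube's type):

	// dockerinfo.go - minimal sketch of the `docker system info` probe above.
	package main

	import (
		"encoding/json"
		"fmt"
		"log"
		"os/exec"
	)

	type dockerInfo struct {
		Driver        string `json:"Driver"`
		NCPU          int    `json:"NCPU"`
		MemTotal      int64  `json:"MemTotal"`
		ServerVersion string `json:"ServerVersion"`
	}

	func main() {
		out, err := exec.Command("docker", "system", "info",
			"--format", "{{json .}}").Output()
		if err != nil {
			log.Fatal(err)
		}
		var info dockerInfo
		if err := json.Unmarshal(out, &info); err != nil {
			log.Fatal(err)
		}
		// On the host above: driver=overlay2 cpus=2 mem=8214839296 version=27.2.1
		fmt.Printf("driver=%s cpus=%d mem=%d version=%s\n",
			info.Driver, info.NCPU, info.MemTotal, info.ServerVersion)
	}
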
	I0916 11:00:43.795715 1445387 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:00:43.795743 1445387 cni.go:84] Creating CNI manager for ""
	I0916 11:00:43.795781 1445387 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:00:43.796365 1445387 start.go:340] cluster config:
	{Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device
-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval
:1m0s}
	I0916 11:00:43.799323 1445387 out.go:177] * Starting "ha-334765" primary control-plane node in "ha-334765" cluster
	I0916 11:00:43.801879 1445387 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:00:43.804746 1445387 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:00:43.807297 1445387 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:00:43.807363 1445387 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:00:43.807417 1445387 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:00:43.807443 1445387 cache.go:56] Caching tarball of preloaded images
	I0916 11:00:43.807528 1445387 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:00:43.807539 1445387 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:00:43.807678 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 11:00:43.829225 1445387 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:00:43.829250 1445387 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:00:43.829346 1445387 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:00:43.829371 1445387 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:00:43.829376 1445387 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:00:43.829385 1445387 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:00:43.829390 1445387 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:00:43.830758 1445387 image.go:273] response: 
	I0916 11:00:44.015984 1445387 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:00:44.016021 1445387 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:00:44.016053 1445387 start.go:360] acquireMachinesLock for ha-334765: {Name:mk63c1424907d32e4e30c00d74a2bae6eec53e1d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:00:44.016124 1445387 start.go:364] duration metric: took 48.253µs to acquireMachinesLock for "ha-334765"
	I0916 11:00:44.016150 1445387 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:00:44.016156 1445387 fix.go:54] fixHost starting: 
	I0916 11:00:44.016457 1445387 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 11:00:44.033222 1445387 fix.go:112] recreateIfNeeded on ha-334765: state=Stopped err=<nil>
	W0916 11:00:44.033254 1445387 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:00:44.036216 1445387 out.go:177] * Restarting existing docker container for "ha-334765" ...
	I0916 11:00:44.039141 1445387 cli_runner.go:164] Run: docker start ha-334765
	I0916 11:00:44.344648 1445387 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 11:00:44.362455 1445387 kic.go:430] container "ha-334765" state is running.
	I0916 11:00:44.362884 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 11:00:44.387024 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 11:00:44.387309 1445387 machine.go:93] provisionDockerMachine start ...
	I0916 11:00:44.387390 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:44.409318 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:44.409580 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34663 <nil> <nil>}
	I0916 11:00:44.409590 1445387 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:00:44.410860 1445387 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:00:47.548382 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 11:00:47.548405 1445387 ubuntu.go:169] provisioning hostname "ha-334765"
	I0916 11:00:47.548515 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:47.566728 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:47.567006 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34663 <nil> <nil>}
	I0916 11:00:47.567028 1445387 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765 && echo "ha-334765" | sudo tee /etc/hostname
	I0916 11:00:47.721068 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765
	
	I0916 11:00:47.721195 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:47.740663 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:47.740960 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34663 <nil> <nil>}
	I0916 11:00:47.740986 1445387 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:00:47.877249 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
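
All of the provisioning above (hostname, the /etc/hosts patch) runs over SSH to the forwarded port from the inspect dump, authenticating as user "docker" with the machine's id_rsa key (both visible in the sshutil lines in this log). A minimal sketch of one such remote command using golang.org/x/crypto/ssh, with the address and key path copied from this log; host-key checking is skipped only because this is a throwaway test node:

	// sshrun.go - minimal sketch: run one command on the node over the
	// forwarded SSH port, the way the provisioning steps above do.
	package main

	import (
		"fmt"
		"log"
		"os"

		"golang.org/x/crypto/ssh"
	)

	func main() {
		key, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa")
		if err != nil {
			log.Fatal(err)
		}
		signer, err := ssh.ParsePrivateKey(key)
		if err != nil {
			log.Fatal(err)
		}
		cfg := &ssh.ClientConfig{
			User:            "docker",
			Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
			HostKeyCallback: ssh.InsecureIgnoreHostKey(), // throwaway test node only
		}
		client, err := ssh.Dial("tcp", "127.0.0.1:34663", cfg)
		if err != nil {
			log.Fatal(err)
		}
		defer client.Close()
		sess, err := client.NewSession()
		if err != nil {
			log.Fatal(err)
		}
		defer sess.Close()
		out, err := sess.CombinedOutput("hostname")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Print(string(out)) // "ha-334765", matching the log above
	}
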
	I0916 11:00:47.877552 1445387 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:00:47.877673 1445387 ubuntu.go:177] setting up certificates
	I0916 11:00:47.877713 1445387 provision.go:84] configureAuth start
	I0916 11:00:47.877845 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 11:00:47.898688 1445387 provision.go:143] copyHostCerts
	I0916 11:00:47.898742 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:00:47.898783 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:00:47.898790 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:00:47.898866 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:00:47.899326 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:00:47.899363 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:00:47.899369 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:00:47.899435 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:00:47.899505 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:00:47.899530 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:00:47.899542 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:00:47.899580 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:00:47.899641 1445387 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765 san=[127.0.0.1 192.168.49.2 ha-334765 localhost minikube]
	I0916 11:00:48.917241 1445387 provision.go:177] copyRemoteCerts
	I0916 11:00:48.917321 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:00:48.917408 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:48.933724 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:00:49.035434 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:00:49.035501 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:00:49.061051 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:00:49.061127 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1200 bytes)
	I0916 11:00:49.087575 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:00:49.087643 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:00:49.112848 1445387 provision.go:87] duration metric: took 1.235086416s to configureAuth
	I0916 11:00:49.112879 1445387 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:00:49.113117 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:49.113229 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:49.130537 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:49.130787 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34663 <nil> <nil>}
	I0916 11:00:49.130808 1445387 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:00:49.570636 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:00:49.570723 1445387 machine.go:96] duration metric: took 5.183396503s to provisionDockerMachine
	I0916 11:00:49.570770 1445387 start.go:293] postStartSetup for "ha-334765" (driver="docker")
	I0916 11:00:49.570797 1445387 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:00:49.570889 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:00:49.570972 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:49.600721 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:00:49.701882 1445387 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:00:49.705106 1445387 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:00:49.705142 1445387 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:00:49.705153 1445387 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:00:49.705171 1445387 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:00:49.705185 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:00:49.705246 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:00:49.705334 1445387 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:00:49.705347 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:00:49.705459 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:00:49.714071 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:00:49.739370 1445387 start.go:296] duration metric: took 168.568056ms for postStartSetup
	I0916 11:00:49.739469 1445387 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:00:49.739516 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:49.755882 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:00:49.849506 1445387 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:00:49.854315 1445387 fix.go:56] duration metric: took 5.83814909s for fixHost
	I0916 11:00:49.854338 1445387 start.go:83] releasing machines lock for "ha-334765", held for 5.838202102s
	I0916 11:00:49.854411 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 11:00:49.874723 1445387 ssh_runner.go:195] Run: cat /version.json
	I0916 11:00:49.874785 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:49.875061 1445387 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:00:49.875137 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:00:49.894152 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:00:49.900386 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:00:49.988538 1445387 ssh_runner.go:195] Run: systemctl --version
	I0916 11:00:50.135015 1445387 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:00:50.278388 1445387 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:00:50.283180 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:00:50.292866 1445387 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:00:50.292955 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:00:50.302612 1445387 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:00:50.302636 1445387 start.go:495] detecting cgroup driver to use...
	I0916 11:00:50.302671 1445387 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:00:50.302745 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:00:50.316057 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:00:50.328606 1445387 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:00:50.328698 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:00:50.342094 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:00:50.354270 1445387 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:00:50.435681 1445387 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:00:50.520031 1445387 docker.go:233] disabling docker service ...
	I0916 11:00:50.520119 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:00:50.532435 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:00:50.544830 1445387 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:00:50.638302 1445387 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:00:50.732891 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:00:50.745384 1445387 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:00:50.762200 1445387 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:00:50.762314 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.772168 1445387 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:00:50.772264 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.782198 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.792265 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.802360 1445387 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:00:50.811973 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.822262 1445387 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:00:50.832942 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
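
Taken together, the sed commands above rewrite /etc/crio/crio.conf.d/02-crio.conf in place. Reconstructed from the commands themselves (not captured from the node), the drop-in ends up with these key settings: the pause image pinned for this Kubernetes version, cgroupfs as the cgroup manager with conmon moved into the pod cgroup, and unprivileged processes allowed to bind low ports:

	pause_image = "registry.k8s.io/pause:3.10"
	cgroup_manager = "cgroupfs"
	conmon_cgroup = "pod"
	default_sysctls = [
	  "net.ipv4.ip_unprivileged_port_start=0",
	]
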
	I0916 11:00:50.843135 1445387 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:00:50.852025 1445387 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:00:50.860781 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:00:50.943163 1445387 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:00:51.072943 1445387 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:00:51.073031 1445387 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:00:51.077125 1445387 start.go:563] Will wait 60s for crictl version
	I0916 11:00:51.077198 1445387 ssh_runner.go:195] Run: which crictl
	I0916 11:00:51.081391 1445387 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:00:51.123412 1445387 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:00:51.123601 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:00:51.168620 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:00:51.211358 1445387 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:00:51.213952 1445387 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:00:51.230162 1445387 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 11:00:51.234026 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:00:51.245944 1445387 kubeadm.go:883] updating cluster {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false l
ogviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPat
h: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:00:51.246148 1445387 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:00:51.246211 1445387 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:00:51.296288 1445387 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:00:51.296313 1445387 crio.go:433] Images already preloaded, skipping extraction
	I0916 11:00:51.296379 1445387 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:00:51.338303 1445387 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:00:51.338328 1445387 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:00:51.338337 1445387 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 crio true true} ...
	I0916 11:00:51.338440 1445387 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:00:51.338524 1445387 ssh_runner.go:195] Run: crio config
	I0916 11:00:51.395003 1445387 cni.go:84] Creating CNI manager for ""
	I0916 11:00:51.395034 1445387 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:00:51.395044 1445387 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:00:51.395067 1445387 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-334765 NodeName:ha-334765 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:00:51.395215 1445387 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "ha-334765"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
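The kubeadm config printed above is staged to /var/tmp/minikube/kubeadm.yaml.new a few lines below. Assuming a kubeadm binary matching v1.31.1 is available under /var/lib/minikube/binaries (only kubelet appears in this log, so treat the binary path as hypothetical), the generated file could be sanity-checked offline:

  # validate the generated config without touching the running cluster
  sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config validate \
    --config /var/tmp/minikube/kubeadm.yaml.new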
	I0916 11:00:51.395237 1445387 kube-vip.go:115] generating kube-vip config ...
	I0916 11:00:51.395294 1445387 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 11:00:51.409269 1445387 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 11:00:51.409373 1445387 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
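Per the env block above, kube-vip advertises 192.168.49.254 on eth0 and elects a leader through the plndr-cp-lock Lease in kube-system. A sketch for verifying both, assuming a working kubeconfig (illustrative checks, not part of the test):

  # the VIP should be bound on eth0 of the current leader only
  ip addr show eth0 | grep -w 192.168.49.254
  # the Lease holder names that leader
  kubectl -n kube-system get lease plndr-cp-lock -o jsonpath='{.spec.holderIdentity}'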
	I0916 11:00:51.409448 1445387 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:00:51.419934 1445387 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:00:51.420039 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 11:00:51.429619 1445387 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (359 bytes)
	I0916 11:00:51.449581 1445387 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:00:51.469514 1445387 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2147 bytes)
	I0916 11:00:51.488634 1445387 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 11:00:51.508082 1445387 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 11:00:51.511782 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
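The /etc/hosts rewrite above uses a standard privilege-safe idiom: filter out the old entry and append the new one into a temp file in the unprivileged shell, then install it with sudo cp. A plain `sudo ... > /etc/hosts` would not work, because the redirection is performed by the calling shell before sudo runs. The same idiom in isolation (variable names hypothetical):

  # replace any existing "<ip><TAB><host>" line in /etc/hosts
  ip=192.168.49.254; host=control-plane.minikube.internal
  { grep -v $'\t'"$host"'$' /etc/hosts; printf '%s\t%s\n' "$ip" "$host"; } > /tmp/hosts.$$
  sudo cp /tmp/hosts.$$ /etc/hosts && rm -f /tmp/hosts.$$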
	I0916 11:00:51.523722 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:00:51.603301 1445387 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:00:51.620263 1445387 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.2
	I0916 11:00:51.620286 1445387 certs.go:194] generating shared ca certs ...
	I0916 11:00:51.620306 1445387 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:51.620462 1445387 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:00:51.620503 1445387 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:00:51.620510 1445387 certs.go:256] generating profile certs ...
	I0916 11:00:51.620585 1445387 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 11:00:51.620613 1445387 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.4f5ea772
	I0916 11:00:51.620626 1445387 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.4f5ea772 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 11:00:51.993568 1445387 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.4f5ea772 ...
	I0916 11:00:51.993606 1445387 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.4f5ea772: {Name:mka1bfae6ed9a8e2966bd862d8d802e84bf30e31 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:51.993815 1445387 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.4f5ea772 ...
	I0916 11:00:51.993830 1445387 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.4f5ea772: {Name:mk73839cb46cc940de3d7201931764db049a5939 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:51.993926 1445387 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt.4f5ea772 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt
	I0916 11:00:51.994088 1445387 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.4f5ea772 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key
	I0916 11:00:51.994228 1445387 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 11:00:51.994248 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:00:51.994265 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:00:51.994280 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:00:51.994294 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:00:51.994309 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:00:51.994323 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:00:51.994342 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:00:51.994353 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:00:51.994409 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:00:51.994471 1445387 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:00:51.994484 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:00:51.994509 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:00:51.994533 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:00:51.994559 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:00:51.994614 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:00:51.994644 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:00:51.994660 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:00:51.994672 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:00:51.995496 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:00:52.028229 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:00:52.055410 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:00:52.080231 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:00:52.104419 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0916 11:00:52.129448 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:00:52.153777 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:00:52.177778 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 11:00:52.201789 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:00:52.226079 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:00:52.252110 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:00:52.277417 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:00:52.296472 1445387 ssh_runner.go:195] Run: openssl version
	I0916 11:00:52.302984 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:00:52.313299 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:00:52.316652 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:00:52.316801 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:00:52.324269 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:00:52.334208 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:00:52.344137 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:00:52.348023 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:00:52.348123 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:00:52.355409 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:00:52.364488 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:00:52.373843 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:00:52.377352 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:00:52.377446 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:00:52.384181 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
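The three hash-then-symlink sequences above follow OpenSSL's hashed-directory convention: each CA in /etc/ssl/certs needs a symlink named <subject-hash>.0 so verification can locate it by hash. The underlying idiom, with a hypothetical file name:

  # compute the subject hash and create the lookup symlink OpenSSL expects
  cert=/usr/share/ca-certificates/example.pem
  h=$(openssl x509 -hash -noout -in "$cert")
  sudo ln -fs "$cert" "/etc/ssl/certs/$h.0"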
	I0916 11:00:52.393238 1445387 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:00:52.396778 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:00:52.403966 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:00:52.411163 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:00:52.418242 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:00:52.425163 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:00:52.432300 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
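The -checkend 86400 probes above exit non-zero if a certificate expires within the next 86400 seconds (24 hours), which is how minikube decides whether regeneration is needed. A minimal illustration with one of the paths from the log:

  # succeeds only if the cert is still valid 24h from now
  if openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400; then
    echo "cert good for at least another day"
  else
    echo "cert expires within 24h; regenerate"
  fi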
	I0916 11:00:52.439285 1445387 kubeadm.go:392] StartCluster: {Name:ha-334765 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:00:52.439412 1445387 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:00:52.439475 1445387 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:00:52.477390 1445387 cri.go:89] found id: ""
	I0916 11:00:52.477556 1445387 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:00:52.486290 1445387 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 11:00:52.486353 1445387 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 11:00:52.486413 1445387 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 11:00:52.494823 1445387 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 11:00:52.495250 1445387 kubeconfig.go:47] verify endpoint returned: get endpoint: "ha-334765" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:00:52.495365 1445387 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-1378450/kubeconfig needs updating (will repair): [kubeconfig missing "ha-334765" cluster setting kubeconfig missing "ha-334765" context setting]
	I0916 11:00:52.495676 1445387 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:52.496089 1445387 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:00:52.496348 1445387 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:00:52.497014 1445387 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 11:00:52.497098 1445387 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:00:52.505779 1445387 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 11:00:52.505853 1445387 kubeadm.go:597] duration metric: took 19.485998ms to restartPrimaryControlPlane
	I0916 11:00:52.505871 1445387 kubeadm.go:394] duration metric: took 66.59394ms to StartCluster
	I0916 11:00:52.505889 1445387 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:52.505954 1445387 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:00:52.506555 1445387 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:00:52.506762 1445387 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:00:52.506790 1445387 start.go:241] waiting for startup goroutines ...
	I0916 11:00:52.506798 1445387 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:00:52.507301 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:52.510929 1445387 out.go:177] * Enabled addons: 
	I0916 11:00:52.513719 1445387 addons.go:510] duration metric: took 6.912589ms for enable addons: enabled=[]
	I0916 11:00:52.513761 1445387 start.go:246] waiting for cluster config update ...
	I0916 11:00:52.513770 1445387 start.go:255] writing updated cluster config ...
	I0916 11:00:52.516577 1445387 out.go:201] 
	I0916 11:00:52.519321 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:52.519471 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 11:00:52.522283 1445387 out.go:177] * Starting "ha-334765-m02" control-plane node in "ha-334765" cluster
	I0916 11:00:52.524781 1445387 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:00:52.527619 1445387 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:00:52.530120 1445387 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:00:52.530171 1445387 cache.go:56] Caching tarball of preloaded images
	I0916 11:00:52.530199 1445387 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:00:52.530282 1445387 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:00:52.530294 1445387 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:00:52.530441 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 11:00:52.548831 1445387 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:00:52.548856 1445387 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:00:52.548935 1445387 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:00:52.548958 1445387 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:00:52.548967 1445387 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:00:52.548975 1445387 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:00:52.548984 1445387 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:00:52.550140 1445387 image.go:273] response: 
	I0916 11:00:52.675215 1445387 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:00:52.675250 1445387 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:00:52.675296 1445387 start.go:360] acquireMachinesLock for ha-334765-m02: {Name:mkb176e2cfa3ae927444127935258ba37ca2bc0a Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:00:52.675398 1445387 start.go:364] duration metric: took 78.25µs to acquireMachinesLock for "ha-334765-m02"
	I0916 11:00:52.675424 1445387 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:00:52.675432 1445387 fix.go:54] fixHost starting: m02
	I0916 11:00:52.675718 1445387 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 11:00:52.692426 1445387 fix.go:112] recreateIfNeeded on ha-334765-m02: state=Stopped err=<nil>
	W0916 11:00:52.692458 1445387 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:00:52.695631 1445387 out.go:177] * Restarting existing docker container for "ha-334765-m02" ...
	I0916 11:00:52.698325 1445387 cli_runner.go:164] Run: docker start ha-334765-m02
	I0916 11:00:52.998282 1445387 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 11:00:53.026884 1445387 kic.go:430] container "ha-334765-m02" state is running.
	I0916 11:00:53.027266 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 11:00:53.050894 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 11:00:53.051137 1445387 machine.go:93] provisionDockerMachine start ...
	I0916 11:00:53.051216 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:53.076521 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:53.076793 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34668 <nil> <nil>}
	I0916 11:00:53.076804 1445387 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:00:53.077474 1445387 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:00:56.267655 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 11:00:56.267684 1445387 ubuntu.go:169] provisioning hostname "ha-334765-m02"
	I0916 11:00:56.267754 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:56.319323 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:56.319573 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34668 <nil> <nil>}
	I0916 11:00:56.319592 1445387 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m02 && echo "ha-334765-m02" | sudo tee /etc/hostname
	I0916 11:00:56.516489 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m02
	
	I0916 11:00:56.516599 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:56.544309 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:56.544553 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34668 <nil> <nil>}
	I0916 11:00:56.544576 1445387 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:00:56.732623 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:00:56.732656 1445387 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:00:56.732686 1445387 ubuntu.go:177] setting up certificates
	I0916 11:00:56.732696 1445387 provision.go:84] configureAuth start
	I0916 11:00:56.732759 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 11:00:56.774203 1445387 provision.go:143] copyHostCerts
	I0916 11:00:56.774249 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:00:56.774286 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:00:56.774298 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:00:56.774373 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:00:56.774463 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:00:56.774485 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:00:56.774494 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:00:56.774525 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:00:56.774574 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:00:56.774596 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:00:56.774603 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:00:56.774628 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:00:56.774682 1445387 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m02 san=[127.0.0.1 192.168.49.3 ha-334765-m02 localhost minikube]
	I0916 11:00:57.267859 1445387 provision.go:177] copyRemoteCerts
	I0916 11:00:57.267933 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:00:57.267980 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:57.289264 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34668 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 11:00:57.411122 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:00:57.411191 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:00:57.482737 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:00:57.482813 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 11:00:57.555007 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:00:57.555080 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:00:57.610832 1445387 provision.go:87] duration metric: took 878.121348ms to configureAuth
	I0916 11:00:57.610864 1445387 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:00:57.611144 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:57.611276 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:57.656750 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:00:57.657001 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34668 <nil> <nil>}
	I0916 11:00:57.657025 1445387 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:00:58.142720 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
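The tee above drops /etc/sysconfig/crio.minikube so CRI-O treats the in-cluster service CIDR (10.96.0.0/12) as an insecure registry, then restarts the daemon. A quick post-restart check might look like this (illustrative only):

  # confirm the drop-in landed and CRI-O came back up
  sudo cat /etc/sysconfig/crio.minikube
  sudo systemctl is-active crio && sudo crictl info >/dev/null && echo "CRI-O is serving"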
	I0916 11:00:58.142771 1445387 machine.go:96] duration metric: took 5.091623696s to provisionDockerMachine
	I0916 11:00:58.142782 1445387 start.go:293] postStartSetup for "ha-334765-m02" (driver="docker")
	I0916 11:00:58.142794 1445387 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:00:58.142873 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:00:58.142929 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:58.159822 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34668 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 11:00:58.267905 1445387 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:00:58.273024 1445387 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:00:58.273064 1445387 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:00:58.273076 1445387 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:00:58.273083 1445387 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:00:58.273094 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:00:58.273160 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:00:58.273254 1445387 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:00:58.273267 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:00:58.273375 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:00:58.285914 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:00:58.321769 1445387 start.go:296] duration metric: took 178.970593ms for postStartSetup
	I0916 11:00:58.321863 1445387 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:00:58.321910 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:58.346146 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34668 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 11:00:58.452457 1445387 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:00:58.461215 1445387 fix.go:56] duration metric: took 5.785773544s for fixHost
	I0916 11:00:58.461245 1445387 start.go:83] releasing machines lock for "ha-334765-m02", held for 5.785833982s
	I0916 11:00:58.461341 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m02
	I0916 11:00:58.491096 1445387 out.go:177] * Found network options:
	I0916 11:00:58.494116 1445387 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 11:00:58.497075 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:00:58.497130 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:00:58.497202 1445387 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:00:58.497248 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:58.497531 1445387 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:00:58.497598 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m02
	I0916 11:00:58.540210 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34668 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 11:00:58.542576 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34668 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m02/id_rsa Username:docker}
	I0916 11:00:58.878226 1445387 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:00:58.890286 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:00:58.907186 1445387 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:00:58.907300 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:00:58.922233 1445387 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:00:58.922270 1445387 start.go:495] detecting cgroup driver to use...
	I0916 11:00:58.922325 1445387 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:00:58.922399 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:00:58.936832 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:00:58.954215 1445387 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:00:58.954315 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:00:59.054341 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:00:59.119779 1445387 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:00:59.613201 1445387 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:00:59.950793 1445387 docker.go:233] disabling docker service ...
	I0916 11:00:59.950898 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:00:59.974647 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:01:00.026528 1445387 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:01:00.371755 1445387 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:01:00.655197 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:01:00.706915 1445387 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:01:00.740808 1445387 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:01:00.740923 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:00.797253 1445387 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:01:00.797352 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:00.847608 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:00.887926 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:00.912049 1445387 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:01:00.928505 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:00.997482 1445387 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:01.037340 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:01.087868 1445387 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:01:01.116444 1445387 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
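Taken together, the sed edits above should leave /etc/crio/crio.conf.d/02-crio.conf with a fragment like the expected output sketched below; this is reconstructed from the commands, not captured from the node:

  sudo grep -E 'pause_image|cgroup_manager|conmon_cgroup|default_sysctls|unprivileged' \
    /etc/crio/crio.conf.d/02-crio.conf
  # expected (among others):
  #   pause_image = "registry.k8s.io/pause:3.10"
  #   cgroup_manager = "cgroupfs"
  #   conmon_cgroup = "pod"
  #   default_sysctls = [
  #     "net.ipv4.ip_unprivileged_port_start=0",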
	I0916 11:01:01.162115 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:01.476492 1445387 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:01:03.707189 1445387 ssh_runner.go:235] Completed: sudo systemctl restart crio: (2.230647327s)
	I0916 11:01:03.707241 1445387 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:01:03.707306 1445387 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:01:03.716327 1445387 start.go:563] Will wait 60s for crictl version
	I0916 11:01:03.716408 1445387 ssh_runner.go:195] Run: which crictl
	I0916 11:01:03.724280 1445387 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:01:03.827861 1445387 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:01:03.827960 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:01:03.895554 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:01:03.994136 1445387 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:01:03.996535 1445387 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 11:01:03.998928 1445387 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:01:04.024902 1445387 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 11:01:04.029322 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:01:04.040819 1445387 mustload.go:65] Loading cluster: ha-334765
	I0916 11:01:04.041072 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:04.041356 1445387 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 11:01:04.060780 1445387 host.go:66] Checking if "ha-334765" exists ...
	I0916 11:01:04.061121 1445387 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.3
	I0916 11:01:04.061130 1445387 certs.go:194] generating shared ca certs ...
	I0916 11:01:04.061144 1445387 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:01:04.061267 1445387 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:01:04.061306 1445387 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:01:04.061314 1445387 certs.go:256] generating profile certs ...
	I0916 11:01:04.061389 1445387 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key
	I0916 11:01:04.061453 1445387 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key.5b1cf632
	I0916 11:01:04.061490 1445387 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key
	I0916 11:01:04.061500 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:01:04.061513 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:01:04.061524 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:01:04.061535 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:01:04.061546 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:01:04.061558 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:01:04.061569 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:01:04.061580 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:01:04.061634 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:01:04.061669 1445387 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:01:04.061678 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:01:04.061702 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:01:04.061727 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:01:04.061750 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:01:04.061792 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:01:04.061822 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:04.061835 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:01:04.061846 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:01:04.061906 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 11:01:04.090473 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34663 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 11:01:04.204997 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 11:01:04.211570 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 11:01:04.234081 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 11:01:04.247537 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1675 bytes)
	I0916 11:01:04.262817 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 11:01:04.267504 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 11:01:04.280753 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 11:01:04.287774 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1675 bytes)
	I0916 11:01:04.301053 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 11:01:04.305303 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 11:01:04.319049 1445387 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 11:01:04.323031 1445387 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1679 bytes)
	I0916 11:01:04.336086 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:01:04.365298 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:01:04.394262 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:01:04.423637 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:01:04.452371 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0916 11:01:04.480285 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:01:04.508350 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:01:04.538153 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 11:01:04.566711 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:01:04.596601 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:01:04.629973 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:01:04.670304 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 11:01:04.692577 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1675 bytes)
	I0916 11:01:04.713850 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 11:01:04.735066 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1675 bytes)
	I0916 11:01:04.760021 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 11:01:04.781162 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1679 bytes)
	I0916 11:01:04.801906 1445387 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 11:01:04.822570 1445387 ssh_runner.go:195] Run: openssl version
	I0916 11:01:04.828533 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:01:04.840785 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:04.847144 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:04.847289 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:04.855476 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:01:04.866263 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:01:04.882247 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:01:04.888129 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:01:04.888304 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:01:04.897532 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:01:04.908528 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:01:04.919776 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:01:04.924451 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:01:04.924595 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:01:04.933620 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
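
The openssl/ln pairs above implement OpenSSL's hashed-directory lookup: every CA certificate under /etc/ssl/certs must also be reachable through a symlink named <subject-hash>.0 (b5213941.0, 51391683.0 and 3ec20f2e.0 in this run) so TLS code can locate it by issuer. A minimal Go sketch of the same step, written as a standalone helper rather than minikube's own code:

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    // linkBySubjectHash replicates the "openssl x509 -hash" + "ln -fs" pair
    // from the log: compute the subject hash of a CA cert and symlink it
    // under /etc/ssl/certs/<hash>.0, where OpenSSL's directory lookup expects it.
    func linkBySubjectHash(pem string) error {
        out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
        if err != nil {
            return err
        }
        hash := strings.TrimSpace(string(out))
        link := fmt.Sprintf("/etc/ssl/certs/%s.0", hash)
        return exec.Command("ln", "-fs", pem, link).Run()
    }

    func main() {
        if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
            panic(err)
        }
    }
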
	I0916 11:01:04.947618 1445387 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:01:04.955067 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:01:04.963357 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:01:04.972012 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:01:04.981152 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:01:04.989704 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:01:04.998367 1445387 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
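
The -checkend 86400 flag makes openssl exit 0 only if the certificate will still be valid 86400 seconds (24 hours) from now, so the exit code alone answers the question. A hedged one-function sketch of that check (assumed helper, not minikube's implementation):

    package main

    import (
        "fmt"
        "os/exec"
    )

    // certValidFor24h mirrors the "-checkend 86400" calls above: openssl
    // exits non-zero when the cert will have expired within the window.
    func certValidFor24h(path string) bool {
        return exec.Command("openssl", "x509", "-noout", "-in", path, "-checkend", "86400").Run() == nil
    }

    func main() {
        fmt.Println(certValidFor24h("/var/lib/minikube/certs/etcd/server.crt"))
    }
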
	I0916 11:01:05.009360 1445387 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 crio true true} ...
	I0916 11:01:05.009567 1445387 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
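
The kubelet unit dumped above is rendered per node; --hostname-override and --node-ip carry m02's identity. A rough illustration of that rendering with text/template; the template text and field names here are assumptions, only the flag layout mirrors the unit in the log:

    package main

    import (
        "os"
        "text/template"
    )

    // Illustrative drop-in template; only the flag layout matches the
    // kubelet unit printed in the log above.
    const dropIn = `[Unit]
    Wants={{.Runtime}}.service

    [Service]
    ExecStart=
    ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --hostname-override={{.Node}} --node-ip={{.IP}} --kubeconfig=/etc/kubernetes/kubelet.conf

    [Install]
    `

    func main() {
        t := template.Must(template.New("kubelet").Parse(dropIn))
        _ = t.Execute(os.Stdout, map[string]string{
            "Runtime": "crio",
            "Version": "v1.31.1",
            "Node":    "ha-334765-m02",
            "IP":      "192.168.49.3",
        })
    }
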
	I0916 11:01:05.009631 1445387 kube-vip.go:115] generating kube-vip config ...
	I0916 11:01:05.009729 1445387 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 11:01:05.031426 1445387 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 11:01:05.031582 1445387 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
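
This static pod is what keeps the API-server VIP alive: with cp_enable and vip_leaderelection set, the kube-vip instance on each control-plane node races for the plndr-cp-lock Lease in kube-system, and the current holder ARP-announces 192.168.49.254 on eth0 (lb_enable additionally spreads port 8443 across members). A client-go sketch for seeing which node holds the VIP right now (assumed standalone tool):

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // kube-vip mounts /etc/kubernetes/admin.conf (see the volumes above).
        cfg, err := clientcmd.BuildConfigFromFlags("", "/etc/kubernetes/admin.conf")
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // vip_leasename / cp_namespace taken from the manifest above.
        lease, err := cs.CoordinationV1().Leases("kube-system").Get(context.TODO(), "plndr-cp-lock", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        if lease.Spec.HolderIdentity != nil {
            fmt.Println("VIP currently announced by:", *lease.Spec.HolderIdentity)
        }
    }
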
	I0916 11:01:05.031690 1445387 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:01:05.043022 1445387 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:01:05.043172 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 11:01:05.058295 1445387 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 11:01:05.081363 1445387 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:01:05.104699 1445387 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 11:01:05.139497 1445387 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 11:01:05.144025 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:01:05.157082 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:05.299734 1445387 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:01:05.315700 1445387 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:01:05.316311 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:05.320671 1445387 out.go:177] * Verifying Kubernetes components...
	I0916 11:01:05.323110 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:05.461453 1445387 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:01:05.480257 1445387 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:01:05.480628 1445387 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 11:01:05.480737 1445387 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
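
QPS:0, Burst:0 in the rest.Config dump above means client-go falls back to its defaults (5 requests/s, burst 10); that client-side token bucket, not server-side priority and fairness, is what produces the "Waited ... due to client-side throttling" lines later in this log. Raising the limit is a config change made before the clientset is built; the values below are illustrative:

    package main

    import (
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // kubeconfig path taken from the log line above; QPS/Burst values
        // are illustrative. Defaults (5/10) apply whenever they are left at 0.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
        if err != nil {
            panic(err)
        }
        cfg.QPS = 50
        cfg.Burst = 100
        if _, err := kubernetes.NewForConfig(cfg); err != nil {
            panic(err)
        }
    }
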
	I0916 11:01:05.481023 1445387 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m02" to be "Ready" ...
	I0916 11:01:05.481172 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:05.481198 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:05.481232 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:05.481252 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.746586 1445387 round_trippers.go:574] Response Status: 200 OK in 9265 milliseconds
	I0916 11:01:14.748037 1445387 node_ready.go:49] node "ha-334765-m02" has status "Ready":"True"
	I0916 11:01:14.748071 1445387 node_ready.go:38] duration metric: took 9.267011376s for node "ha-334765-m02" to be "Ready" ...
	I0916 11:01:14.748083 1445387 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:01:14.748127 1445387 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:01:14.748156 1445387 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:01:14.748227 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:14.748238 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.748247 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.748254 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.787009 1445387 round_trippers.go:574] Response Status: 200 OK in 38 milliseconds
	I0916 11:01:14.812775 1445387 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.813029 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:14.813065 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.813102 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.813150 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.823923 1445387 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 11:01:14.825318 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:14.825346 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.825356 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.825360 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.841759 1445387 round_trippers.go:574] Response Status: 200 OK in 16 milliseconds
	I0916 11:01:14.842446 1445387 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:14.842462 1445387 pod_ready.go:82] duration metric: took 29.590155ms for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.842475 1445387 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.842575 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 11:01:14.842585 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.842593 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.842597 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.856789 1445387 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 11:01:14.861851 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:14.861952 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.861997 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.862047 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.874272 1445387 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:01:14.875193 1445387 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:14.875262 1445387 pod_ready.go:82] duration metric: took 32.771169ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.875313 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.875440 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 11:01:14.875492 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.875527 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.875569 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.880066 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:14.880937 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:14.881009 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.881044 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.881080 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.889155 1445387 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:01:14.890240 1445387 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:14.901481 1445387 pod_ready.go:82] duration metric: took 26.119128ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.901592 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.901780 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 11:01:14.901819 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.901857 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.901890 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.913984 1445387 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:01:14.914887 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:14.914967 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.914995 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.915018 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.927151 1445387 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:01:14.927862 1445387 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:14.927923 1445387 pod_ready.go:82] duration metric: took 26.300013ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.927999 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:14.928159 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 11:01:14.928210 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.928233 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.928262 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.935952 1445387 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:01:14.948318 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:14.948427 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:14.948477 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:14.948510 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:14.956814 1445387 round_trippers.go:574] Response Status: 404 Not Found in 8 milliseconds
	I0916 11:01:14.957267 1445387 pod_ready.go:98] node "ha-334765-m03" hosting pod "etcd-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:14.957317 1445387 pod_ready.go:82] duration metric: took 29.272534ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:14.957349 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m03" hosting pod "etcd-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
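
The 404 above is the expected shape of this wait loop: for each system pod the poller fetches the pod, then the pod's node, and a missing node (ha-334765-m03 was removed earlier in the test) turns the wait into a skip rather than consuming the 6m0s timeout. Roughly, in client-go terms (assumed helper, not the actual pod_ready.go source):

    package podwait

    import (
        "context"

        corev1 "k8s.io/api/core/v1"
        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // hostNodeReady reports whether the node hosting a pod is Ready.
    // A 404 on the node (as for ha-334765-m03 above) sets gone=true so the
    // caller can skip the pod instead of waiting out the full timeout.
    func hostNodeReady(cs *kubernetes.Clientset, nodeName string) (ready, gone bool, err error) {
        node, err := cs.CoreV1().Nodes().Get(context.TODO(), nodeName, metav1.GetOptions{})
        if apierrors.IsNotFound(err) {
            return false, true, nil
        }
        if err != nil {
            return false, false, err
        }
        for _, c := range node.Status.Conditions {
            if c.Type == corev1.NodeReady {
                return c.Status == corev1.ConditionTrue, false, nil
            }
        }
        return false, false, nil
    }
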
	I0916 11:01:14.957399 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:15.148901 1445387 request.go:632] Waited for 191.315374ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 11:01:15.149035 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 11:01:15.149070 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:15.149101 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:15.149123 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:15.167112 1445387 round_trippers.go:574] Response Status: 200 OK in 17 milliseconds
	I0916 11:01:15.348834 1445387 request.go:632] Waited for 176.320035ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:15.349397 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:15.349410 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:15.349429 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:15.349439 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:15.352706 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:15.353400 1445387 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:15.353444 1445387 pod_ready.go:82] duration metric: took 395.97935ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:15.353470 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:15.549285 1445387 request.go:632] Waited for 195.722795ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 11:01:15.549379 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 11:01:15.549400 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:15.549415 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:15.549419 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:15.554767 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:15.748327 1445387 request.go:632] Waited for 192.237828ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:15.748399 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:15.748416 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:15.748426 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:15.748470 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:15.752201 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:15.753301 1445387 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:15.753332 1445387 pod_ready.go:82] duration metric: took 399.841184ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:15.753351 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:15.948270 1445387 request.go:632] Waited for 194.850964ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 11:01:15.948340 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 11:01:15.948347 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:15.948402 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:15.948412 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:15.952460 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:16.149188 1445387 request.go:632] Waited for 194.279508ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:16.149276 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:16.149327 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:16.149341 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:16.149346 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:16.173143 1445387 round_trippers.go:574] Response Status: 404 Not Found in 23 milliseconds
	I0916 11:01:16.175884 1445387 pod_ready.go:98] node "ha-334765-m03" hosting pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:16.175929 1445387 pod_ready.go:82] duration metric: took 422.569129ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:16.175941 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m03" hosting pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:16.175949 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:16.348331 1445387 request.go:632] Waited for 172.28675ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 11:01:16.348403 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 11:01:16.348415 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:16.348424 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:16.348462 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:16.353926 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:16.549033 1445387 request.go:632] Waited for 191.254394ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:16.549105 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:16.549117 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:16.549125 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:16.549131 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:16.553803 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:16.554782 1445387 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:16.554804 1445387 pod_ready.go:82] duration metric: took 378.843872ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:16.554834 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:16.748821 1445387 request.go:632] Waited for 193.916178ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:16.748915 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:16.748961 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:16.748978 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:16.748983 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:16.752819 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:16.948320 1445387 request.go:632] Waited for 194.150068ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:16.948400 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:16.948412 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:16.948453 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:16.948462 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:16.951183 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:17.148283 1445387 request.go:632] Waited for 93.150472ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:17.148362 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:17.148372 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:17.148382 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:17.148391 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:17.151396 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:17.348962 1445387 request.go:632] Waited for 196.30824ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:17.349035 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:17.349046 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:17.349056 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:17.349072 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:17.353487 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:17.555598 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:17.555633 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:17.555644 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:17.555649 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:17.567330 1445387 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 11:01:17.748744 1445387 request.go:632] Waited for 180.110363ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:17.748843 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:17.748861 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:17.748871 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:17.748880 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:17.756642 1445387 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:01:18.055258 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:18.055289 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:18.055300 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:18.055304 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:18.059355 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:18.148285 1445387 request.go:632] Waited for 88.10584ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:18.148366 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:18.148411 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:18.148425 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:18.148430 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:18.154257 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:18.555092 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:18.555116 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:18.555125 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:18.555129 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:18.558453 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:18.559271 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:18.559289 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:18.559298 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:18.559302 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:18.562131 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:18.562672 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:19.055358 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:19.055384 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:19.055394 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:19.055399 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:19.084499 1445387 round_trippers.go:574] Response Status: 200 OK in 29 milliseconds
	I0916 11:01:19.085357 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:19.085406 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:19.085432 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:19.085463 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:19.128332 1445387 round_trippers.go:574] Response Status: 200 OK in 42 milliseconds
	I0916 11:01:19.555074 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:19.555167 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:19.555194 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:19.555214 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:19.709744 1445387 round_trippers.go:574] Response Status: 200 OK in 154 milliseconds
	I0916 11:01:19.710735 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:19.710794 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:19.710816 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:19.710853 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:19.799557 1445387 round_trippers.go:574] Response Status: 200 OK in 88 milliseconds
	I0916 11:01:20.055081 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:20.055156 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:20.055180 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:20.055199 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:20.084056 1445387 round_trippers.go:574] Response Status: 200 OK in 28 milliseconds
	I0916 11:01:20.085603 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:20.085680 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:20.085704 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:20.085724 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:20.107809 1445387 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 11:01:20.555769 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:20.555790 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:20.555799 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:20.555806 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:20.564298 1445387 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:01:20.565613 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:20.565673 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:20.565697 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:20.565716 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:20.571688 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:20.572703 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:21.055070 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:21.055147 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:21.055181 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:21.055198 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:21.064462 1445387 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:01:21.065405 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:21.065474 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:21.065499 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:21.065521 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:21.089675 1445387 round_trippers.go:574] Response Status: 200 OK in 24 milliseconds
	I0916 11:01:21.556046 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:21.556065 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:21.556075 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:21.556080 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:21.559912 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:21.561324 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:21.561390 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:21.561417 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:21.561434 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:21.564554 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:22.055059 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:22.055085 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:22.055095 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:22.055101 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:22.058133 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:22.059433 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:22.059494 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:22.059516 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:22.059534 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:22.062391 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:22.555149 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:22.555233 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:22.555257 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:22.555274 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:22.557983 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:22.558845 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:22.558899 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:22.558921 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:22.558941 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:22.562843 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:23.055132 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:23.055162 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:23.055174 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:23.055180 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:23.058565 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:23.059493 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:23.059517 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:23.059527 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:23.059534 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:23.062362 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:23.062970 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:23.555732 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:23.555758 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:23.555768 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:23.555774 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:23.558747 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:23.559849 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:23.559882 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:23.559891 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:23.559897 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:23.562796 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:24.055896 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:24.055922 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:24.055931 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:24.055938 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:24.058967 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:24.059861 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:24.059943 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:24.059969 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:24.059992 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:24.063027 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:24.555122 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:24.555147 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:24.555157 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:24.555161 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:24.558053 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:24.558860 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:24.558877 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:24.558886 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:24.558907 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:24.561582 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:25.055144 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:25.055169 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:25.055179 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:25.055205 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:25.058044 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:25.059080 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:25.059099 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:25.059109 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:25.059114 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:25.061762 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:25.555525 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:25.555550 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:25.555560 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:25.555563 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:25.558904 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:25.559716 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:25.559737 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:25.559749 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:25.559755 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:25.562543 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:25.563212 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:26.055829 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:26.055856 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:26.055866 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:26.055872 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:26.059074 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:26.060094 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:26.060151 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:26.060168 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:26.060173 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:26.062857 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:26.555245 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:26.555277 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:26.555287 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:26.555291 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:26.558625 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:26.560043 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:26.560067 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:26.560076 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:26.560081 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:26.564339 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:27.055730 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:27.055757 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:27.055773 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:27.055777 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:27.058863 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:27.059675 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:27.059693 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:27.059702 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:27.059706 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:27.062452 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:27.555814 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:27.555838 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:27.555848 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:27.555853 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:27.558817 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:27.559690 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:27.559712 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:27.559722 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:27.559726 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:27.562255 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:28.055296 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:28.055321 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:28.055330 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:28.055337 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:28.058405 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:28.059283 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:28.059307 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:28.059316 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:28.059321 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:28.062127 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:28.062902 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:28.555116 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:28.555139 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:28.555148 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:28.555153 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:28.558246 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:28.559072 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:28.559090 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:28.559100 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:28.559123 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:28.561692 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:29.055732 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:29.055752 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:29.055762 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:29.055768 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:29.058984 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:29.060322 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:29.060341 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:29.060351 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:29.060357 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:29.063229 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:29.555308 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:29.555334 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:29.555344 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:29.555350 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:29.558039 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:29.558837 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:29.558855 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:29.558864 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:29.558869 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:29.561424 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.055901 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:30.055933 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.055944 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.055950 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.060168 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:30.061606 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:30.061625 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.061647 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.061653 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.071341 1445387 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:01:30.072670 1445387 pod_ready.go:103] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:30.555135 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:01:30.555159 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.555169 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.555175 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.558122 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.559161 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:30.559183 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.559228 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.559238 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.561885 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.562459 1445387 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:30.562485 1445387 pod_ready.go:82] duration metric: took 14.007642327s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.562499 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.562570 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 11:01:30.562581 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.562590 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.562595 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.565234 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.566156 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:30.566176 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.566185 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.566190 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.568709 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:01:30.568849 1445387 pod_ready.go:98] node "ha-334765-m03" hosting pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:30.568868 1445387 pod_ready.go:82] duration metric: took 6.360356ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:30.568878 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m03" hosting pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
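The skip above shows the shape of minikube's pod_ready.go wait loop: for each system pod it GETs the pod, then GETs the node the pod is scheduled on, and a 404 on the node (here the already-deleted ha-334765-m03) is treated as a condition that can never become Ready, so the wait is abandoned rather than retried. A minimal client-go sketch of that pattern — not minikube's actual code; the 500ms poll interval is an assumption matched to the request spacing visible in the log:

package readiness

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// waitPodReady polls a pod until its Ready condition is True, giving up
// early when the hosting node no longer exists (the ha-334765-m03 case).
func waitPodReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string) error {
	for {
		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		// A pod on a deleted node can never become Ready: skip, don't retry.
		_, err = cs.CoreV1().Nodes().Get(ctx, pod.Spec.NodeName, metav1.GetOptions{})
		if apierrors.IsNotFound(err) {
			return fmt.Errorf("node %q hosting pod %q not found, skipping", pod.Spec.NodeName, name)
		}
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
				return nil
			}
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(500 * time.Millisecond): // assumed interval, matching the log spacing
		}
	}
}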
	I0916 11:01:30.568889 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.568955 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 11:01:30.568964 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.568973 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.568979 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.571660 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.572372 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:30.572391 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.572401 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.572404 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.574784 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:01:30.574956 1445387 pod_ready.go:98] node "ha-334765-m03" hosting pod "kube-proxy-4vsvh" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:30.575006 1445387 pod_ready.go:82] duration metric: took 6.106283ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:30.575022 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m03" hosting pod "kube-proxy-4vsvh" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:30.575031 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.575104 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:01:30.575116 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.575124 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.575129 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.577745 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.578629 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:01:30.578651 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.578661 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.578666 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.581296 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.581872 1445387 pod_ready.go:93] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:30.581892 1445387 pod_ready.go:82] duration metric: took 6.852349ms for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.581905 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.581973 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 11:01:30.581984 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.581993 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.581999 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.584601 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.585228 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:30.585239 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.585250 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.585254 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.587784 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.588378 1445387 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:30.588396 1445387 pod_ready.go:82] duration metric: took 6.483578ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.588407 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:30.755743 1445387 request.go:632] Waited for 167.249126ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 11:01:30.755829 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 11:01:30.755843 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.755852 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.755857 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.758876 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:30.955916 1445387 request.go:632] Waited for 196.35711ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:30.956008 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:30.956014 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:30.956024 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:30.956034 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:30.959355 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:30.960139 1445387 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:30.960163 1445387 pod_ready.go:82] duration metric: took 371.728335ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
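The request.go:632 lines here are client-go's client-side rate limiter speaking, not the server's priority-and-fairness: a rest.Config left at its defaults gets QPS 5 with Burst 10, so a tight poll loop like this one quickly starts sleeping between requests. A sketch of loosening that limiter when building a client; the kubeconfig path and the 50/100 values are illustrative choices, not minikube's:

package main

import (
	"log"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// client-go defaults to QPS 5 / Burst 10, which produces the
	// "Waited ... due to client-side throttling" messages above.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
	if err != nil {
		log.Fatal(err)
	}
	cfg.QPS = 50    // sustained requests per second
	cfg.Burst = 100 // short-term burst allowance
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	_ = cs // use the clientset as usual; polls now throttle far later
}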
	I0916 11:01:30.960183 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:31.156045 1445387 request.go:632] Waited for 195.754721ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 11:01:31.156229 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 11:01:31.156276 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:31.156316 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:31.156360 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:31.162447 1445387 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:01:31.355340 1445387 request.go:632] Waited for 192.291891ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:31.355422 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:31.355429 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:31.355438 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:31.355443 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:31.358529 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:31.359201 1445387 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:31.359224 1445387 pod_ready.go:82] duration metric: took 399.003554ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:31.359236 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:31.555749 1445387 request.go:632] Waited for 196.380297ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:31.555830 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:31.555842 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:31.555851 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:31.555860 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:31.558983 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:31.756100 1445387 request.go:632] Waited for 196.340626ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:31.756172 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:31.756180 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:31.756189 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:31.756194 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:31.759152 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:31.955882 1445387 request.go:632] Waited for 96.25181ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:31.956028 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:31.956070 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:31.956086 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:31.956092 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:31.959087 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:32.156139 1445387 request.go:632] Waited for 196.329689ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.156212 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.156218 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:32.156227 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:32.156232 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:32.159265 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:32.359483 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:32.359508 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:32.359520 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:32.359525 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:32.362398 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:32.555923 1445387 request.go:632] Waited for 192.905619ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.556008 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.556021 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:32.556028 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:32.556033 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:32.558965 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:32.859427 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:32.859452 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:32.859462 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:32.859466 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:32.862507 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:32.955683 1445387 request.go:632] Waited for 92.246577ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.955740 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:32.955746 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:32.955754 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:32.955758 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:32.958379 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:33.359800 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:01:33.359821 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.359832 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.359837 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.362698 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:33.363476 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:33.363496 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.363505 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.363510 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.366107 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:33.366803 1445387 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:33.366847 1445387 pod_ready.go:82] duration metric: took 2.007602962s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:33.366867 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:33.555232 1445387 request.go:632] Waited for 188.273794ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 11:01:33.555321 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 11:01:33.555332 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.555348 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.555358 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.558519 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:33.756090 1445387 request.go:632] Waited for 196.935737ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:33.756154 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m03
	I0916 11:01:33.756159 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.756168 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.756174 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.758773 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:01:33.758953 1445387 pod_ready.go:98] node "ha-334765-m03" hosting pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:33.758973 1445387 pod_ready.go:82] duration metric: took 392.097759ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:33.758985 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-334765-m03" hosting pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-334765-m03": nodes "ha-334765-m03" not found
	I0916 11:01:33.758994 1445387 pod_ready.go:39] duration metric: took 19.010898832s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:01:33.759016 1445387 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:01:33.759082 1445387 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:01:33.770705 1445387 api_server.go:72] duration metric: took 28.45490782s to wait for apiserver process to appear ...
	I0916 11:01:33.770732 1445387 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:01:33.770754 1445387 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 11:01:33.781469 1445387 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 11:01:33.781562 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 11:01:33.781581 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.781597 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.781604 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.782547 1445387 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:01:33.783138 1445387 api_server.go:141] control plane version: v1.31.1
	I0916 11:01:33.783165 1445387 api_server.go:131] duration metric: took 12.425705ms to wait for apiserver health ...
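The healthz gate above is a plain HTTPS GET that must come back 200 with the body "ok" before minikube moves on to the version check. A standalone sketch of that probe; the real check authenticates with the cluster CA, and the InsecureSkipVerify below is only an example shortcut to keep the sketch self-contained:

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"log"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 2 * time.Second,
		// minikube trusts the cluster CA here; skipping verification
		// is an example-only shortcut.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	for {
		resp, err := client.Get("https://192.168.49.2:8443/healthz")
		if err == nil {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK && string(body) == "ok" {
				fmt.Println("apiserver healthy")
				return
			}
		}
		log.Println("apiserver not healthy yet, retrying")
		time.Sleep(time.Second)
	}
}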
	I0916 11:01:33.783174 1445387 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:01:33.955585 1445387 request.go:632] Waited for 172.315107ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:33.955650 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:33.955671 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:33.955684 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:33.955693 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:33.964104 1445387 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:01:33.974327 1445387 system_pods.go:59] 26 kube-system pods found
	I0916 11:01:33.974370 1445387 system_pods.go:61] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:01:33.974379 1445387 system_pods.go:61] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:01:33.974386 1445387 system_pods.go:61] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 11:01:33.974391 1445387 system_pods.go:61] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 11:01:33.974396 1445387 system_pods.go:61] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 11:01:33.974401 1445387 system_pods.go:61] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 11:01:33.974405 1445387 system_pods.go:61] "kindnet-plxdg" [15478b1f-0067-4d48-84f3-27b777cc4ff3] Running
	I0916 11:01:33.974409 1445387 system_pods.go:61] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 11:01:33.974415 1445387 system_pods.go:61] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 11:01:33.974419 1445387 system_pods.go:61] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 11:01:33.974424 1445387 system_pods.go:61] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 11:01:33.974431 1445387 system_pods.go:61] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 11:01:33.974436 1445387 system_pods.go:61] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 11:01:33.974441 1445387 system_pods.go:61] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 11:01:33.974448 1445387 system_pods.go:61] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 11:01:33.974452 1445387 system_pods.go:61] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 11:01:33.974457 1445387 system_pods.go:61] "kube-proxy-br496" [db7b7049-0d21-4564-8c72-de55e63b5051] Running
	I0916 11:01:33.974464 1445387 system_pods.go:61] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 11:01:33.974471 1445387 system_pods.go:61] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 11:01:33.974476 1445387 system_pods.go:61] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 11:01:33.974480 1445387 system_pods.go:61] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 11:01:33.974484 1445387 system_pods.go:61] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 11:01:33.974489 1445387 system_pods.go:61] "kube-vip-ha-334765" [baed9adb-c604-4a84-b55e-53a93f120d7b] Running
	I0916 11:01:33.974494 1445387 system_pods.go:61] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 11:01:33.974507 1445387 system_pods.go:61] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 11:01:33.974511 1445387 system_pods.go:61] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 11:01:33.974516 1445387 system_pods.go:74] duration metric: took 191.310402ms to wait for pod list to return data ...
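The 26-line inventory above comes from a single List over kube-system plus a per-pod condition summary; that is why the two coredns pods show Running but Ready:ContainersNotReady. A sketch of reproducing that summary, reusing the package and imports from the waitPodReady sketch earlier and assuming a clientset cs built as in the throttling example:

// listSystemPods prints each kube-system pod's phase and, when the pod
// is not Ready, the condition reason (e.g. ContainersNotReady above).
func listSystemPods(ctx context.Context, cs *kubernetes.Clientset) error {
	pods, err := cs.CoreV1().Pods("kube-system").List(ctx, metav1.ListOptions{})
	if err != nil {
		return err
	}
	for _, p := range pods.Items {
		status := string(p.Status.Phase)
		for _, c := range p.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status != corev1.ConditionTrue {
				status += " / Ready:" + c.Reason
			}
		}
		fmt.Printf("%q [%s] %s\n", p.Name, p.UID, status)
	}
	return nil
}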
	I0916 11:01:33.974523 1445387 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:01:34.155972 1445387 request.go:632] Waited for 181.362074ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:01:34.156071 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:01:34.156083 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:34.156092 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:34.156103 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:34.159010 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:34.159265 1445387 default_sa.go:45] found service account: "default"
	I0916 11:01:34.159285 1445387 default_sa.go:55] duration metric: took 184.755424ms for default service account to be created ...
	I0916 11:01:34.159295 1445387 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:01:34.355721 1445387 request.go:632] Waited for 196.358545ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:34.355810 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:34.355823 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:34.355832 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:34.355842 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:34.361379 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:34.373566 1445387 system_pods.go:86] 26 kube-system pods found
	I0916 11:01:34.373703 1445387 system_pods.go:89] "coredns-7c65d6cfc9-q5xr7" [14514e6e-34ae-4a79-b0e0-008742ae46b9] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:01:34.373717 1445387 system_pods.go:89] "coredns-7c65d6cfc9-s9fp9" [0e29200a-0909-47e1-8521-bf5f9b645d6c] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:01:34.373725 1445387 system_pods.go:89] "etcd-ha-334765" [9a0b9474-60f4-440e-a898-d397f7425086] Running
	I0916 11:01:34.373740 1445387 system_pods.go:89] "etcd-ha-334765-m02" [635fd2d2-f9cc-4e08-b73b-18633a58b6e4] Running
	I0916 11:01:34.373753 1445387 system_pods.go:89] "etcd-ha-334765-m03" [9527225b-e7ae-481b-b5b1-47b445990b4b] Running
	I0916 11:01:34.373761 1445387 system_pods.go:89] "kindnet-7s5t5" [e1832b94-ac8f-43c0-af10-ddc6afbb229b] Running
	I0916 11:01:34.373767 1445387 system_pods.go:89] "kindnet-plxdg" [15478b1f-0067-4d48-84f3-27b777cc4ff3] Running
	I0916 11:01:34.373771 1445387 system_pods.go:89] "kindnet-rfw69" [396f204a-53ea-4720-85fc-05ba54d285ca] Running
	I0916 11:01:34.373779 1445387 system_pods.go:89] "kindnet-vj27j" [61e290b4-d19c-40f3-a50d-bfa09fddb710] Running
	I0916 11:01:34.373787 1445387 system_pods.go:89] "kube-apiserver-ha-334765" [471aea01-5646-4ce8-91e0-b0b39f8a275a] Running
	I0916 11:01:34.373795 1445387 system_pods.go:89] "kube-apiserver-ha-334765-m02" [877c49f9-6fae-4cdb-b208-940eba98383b] Running
	I0916 11:01:34.373804 1445387 system_pods.go:89] "kube-apiserver-ha-334765-m03" [b14f2a2b-6410-438a-99e2-86fa58140695] Running
	I0916 11:01:34.373811 1445387 system_pods.go:89] "kube-controller-manager-ha-334765" [23b2f4a4-942f-4ea7-afef-561ab69ac144] Running
	I0916 11:01:34.373817 1445387 system_pods.go:89] "kube-controller-manager-ha-334765-m02" [07411ea7-458c-475c-93ff-5db4f6c1c4b1] Running
	I0916 11:01:34.373822 1445387 system_pods.go:89] "kube-controller-manager-ha-334765-m03" [2aa8cca1-22de-4cd0-88a2-ac864da09d8d] Running
	I0916 11:01:34.373840 1445387 system_pods.go:89] "kube-proxy-4vsvh" [551f3711-d8b3-4360-8a18-d6183d4aec6d] Running
	I0916 11:01:34.373844 1445387 system_pods.go:89] "kube-proxy-br496" [db7b7049-0d21-4564-8c72-de55e63b5051] Running
	I0916 11:01:34.373849 1445387 system_pods.go:89] "kube-proxy-l998t" [e92c97ea-9eb8-40c4-a7f6-aeb43c89e6f4] Running
	I0916 11:01:34.373859 1445387 system_pods.go:89] "kube-proxy-tlfs7" [6a873882-8023-44b5-82d9-2f18e70f8ef1] Running
	I0916 11:01:34.373864 1445387 system_pods.go:89] "kube-scheduler-ha-334765" [6189b5cd-f342-4b6a-ae21-b6b7125e4f06] Running
	I0916 11:01:34.373868 1445387 system_pods.go:89] "kube-scheduler-ha-334765-m02" [61387062-d6b0-4e2d-b2f9-10f29b0bcef6] Running
	I0916 11:01:34.373875 1445387 system_pods.go:89] "kube-scheduler-ha-334765-m03" [98c99d71-0ea3-46a3-ab06-7b5971730ba8] Running
	I0916 11:01:34.373879 1445387 system_pods.go:89] "kube-vip-ha-334765" [baed9adb-c604-4a84-b55e-53a93f120d7b] Running
	I0916 11:01:34.373886 1445387 system_pods.go:89] "kube-vip-ha-334765-m02" [450bd3f6-46b4-426c-a6b2-2ad37b58b171] Running
	I0916 11:01:34.373890 1445387 system_pods.go:89] "kube-vip-ha-334765-m03" [efeb2f57-409a-45a7-87e2-dae52a680b3e] Running
	I0916 11:01:34.373897 1445387 system_pods.go:89] "storage-provisioner" [4db2490d-9707-4734-973b-adac5570e275] Running
	I0916 11:01:34.373909 1445387 system_pods.go:126] duration metric: took 214.604912ms to wait for k8s-apps to be running ...
	I0916 11:01:34.373920 1445387 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:01:34.374006 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:01:34.386385 1445387 system_svc.go:56] duration metric: took 12.455834ms WaitForService to wait for kubelet
	I0916 11:01:34.386422 1445387 kubeadm.go:582] duration metric: took 29.070624935s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:01:34.386443 1445387 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:01:34.555809 1445387 request.go:632] Waited for 169.291855ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 11:01:34.555874 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 11:01:34.555880 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:34.555889 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:34.555894 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:34.559018 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:34.560927 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:01:34.560969 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:01:34.560987 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:01:34.560996 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:01:34.561001 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:01:34.561015 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:01:34.561023 1445387 node_conditions.go:105] duration metric: took 174.571821ms to run NodePressure ...
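The three cpu/ephemeral-storage pairs above are the NodePressure pass reading .status.capacity off each of the three remaining nodes. The same numbers can be pulled with one List, sketched here with the same package and imports as the earlier client-go examples:

// printNodeCapacity mirrors the NodePressure capacity lines above.
func printNodeCapacity(ctx context.Context, cs *kubernetes.Clientset) error {
	nodes, err := cs.CoreV1().Nodes().List(ctx, metav1.ListOptions{})
	if err != nil {
		return err
	}
	for _, n := range nodes.Items {
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		eph := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name, cpu.String(), eph.String())
	}
	return nil
}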
	I0916 11:01:34.561036 1445387 start.go:241] waiting for startup goroutines ...
	I0916 11:01:34.561078 1445387 start.go:255] writing updated cluster config ...
	I0916 11:01:34.564310 1445387 out.go:201] 
	I0916 11:01:34.567004 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:34.567172 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 11:01:34.570474 1445387 out.go:177] * Starting "ha-334765-m04" worker node in "ha-334765" cluster
	I0916 11:01:34.573589 1445387 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:01:34.576166 1445387 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:01:34.578749 1445387 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:01:34.578795 1445387 cache.go:56] Caching tarball of preloaded images
	I0916 11:01:34.578840 1445387 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:01:34.578912 1445387 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:01:34.578934 1445387 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:01:34.579083 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	W0916 11:01:34.598535 1445387 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:01:34.598569 1445387 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:01:34.598707 1445387 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:01:34.598733 1445387 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:01:34.598743 1445387 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:01:34.598752 1445387 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:01:34.598760 1445387 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:01:34.600147 1445387 image.go:273] response: 
	I0916 11:01:34.715510 1445387 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:01:34.715550 1445387 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:01:34.715583 1445387 start.go:360] acquireMachinesLock for ha-334765-m04: {Name:mkadeccdfd1355580c8feb9cbbbc4cc86963393f Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:01:34.715659 1445387 start.go:364] duration metric: took 53.315µs to acquireMachinesLock for "ha-334765-m04"
	I0916 11:01:34.715686 1445387 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:01:34.715694 1445387 fix.go:54] fixHost starting: m04
	I0916 11:01:34.715970 1445387 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 11:01:34.732168 1445387 fix.go:112] recreateIfNeeded on ha-334765-m04: state=Stopped err=<nil>
	W0916 11:01:34.732198 1445387 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:01:34.735224 1445387 out.go:177] * Restarting existing docker container for "ha-334765-m04" ...
	I0916 11:01:34.737691 1445387 cli_runner.go:164] Run: docker start ha-334765-m04
	I0916 11:01:35.068448 1445387 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 11:01:35.095606 1445387 kic.go:430] container "ha-334765-m04" state is running.
	I0916 11:01:35.096214 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 11:01:35.119379 1445387 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/config.json ...
	I0916 11:01:35.119651 1445387 machine.go:93] provisionDockerMachine start ...
	I0916 11:01:35.119716 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:35.142314 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:01:35.142571 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34673 <nil> <nil>}
	I0916 11:01:35.142581 1445387 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:01:35.143582 1445387 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:01:38.280265 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m04
	
	I0916 11:01:38.280314 1445387 ubuntu.go:169] provisioning hostname "ha-334765-m04"
	I0916 11:01:38.280400 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:38.308021 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:01:38.308279 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34673 <nil> <nil>}
	I0916 11:01:38.308297 1445387 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-334765-m04 && echo "ha-334765-m04" | sudo tee /etc/hostname
	I0916 11:01:38.461762 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-334765-m04
	
	I0916 11:01:38.461847 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:38.479616 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:01:38.479900 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34673 <nil> <nil>}
	I0916 11:01:38.479924 1445387 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-334765-m04' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-334765-m04/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-334765-m04' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:01:38.616885 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
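Provisioning here runs over libmachine's native SSH client: the first dial at 11:01:35 fails with a handshake EOF, presumably because sshd in the just-restarted container is not accepting connections yet, and the retry a few seconds later succeeds, after which the hostname and /etc/hosts commands go through one session each. A stripped-down sketch of running one such command with golang.org/x/crypto/ssh, with the address, port, user, and key path taken from the log; the skipped host-key check and absent retry loop are example-only simplifications:

package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"golang.org/x/crypto/ssh"
)

func main() {
	key, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa")
	if err != nil {
		log.Fatal(err)
	}
	signer, err := ssh.ParsePrivateKey(key)
	if err != nil {
		log.Fatal(err)
	}
	cfg := &ssh.ClientConfig{
		User:            "docker",
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // example-only: local forwarded port
		Timeout:         5 * time.Second,
	}
	client, err := ssh.Dial("tcp", "127.0.0.1:34673", cfg)
	if err != nil {
		log.Fatal(err) // a freshly started container may refuse once; real code retries
	}
	defer client.Close()
	sess, err := client.NewSession()
	if err != nil {
		log.Fatal(err)
	}
	defer sess.Close()
	out, err := sess.CombinedOutput(`sudo hostname ha-334765-m04 && echo "ha-334765-m04" | sudo tee /etc/hostname`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", out)
}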
	I0916 11:01:38.616934 1445387 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:01:38.616957 1445387 ubuntu.go:177] setting up certificates
	I0916 11:01:38.616967 1445387 provision.go:84] configureAuth start
	I0916 11:01:38.617039 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 11:01:38.635550 1445387 provision.go:143] copyHostCerts
	I0916 11:01:38.635595 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:01:38.635630 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:01:38.635638 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:01:38.635721 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:01:38.635814 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:01:38.635841 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:01:38.635846 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:01:38.635891 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:01:38.635983 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:01:38.635999 1445387 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:01:38.636004 1445387 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:01:38.636028 1445387 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:01:38.636075 1445387 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.ha-334765-m04 san=[127.0.0.1 192.168.49.5 ha-334765-m04 localhost minikube]
	I0916 11:01:39.023944 1445387 provision.go:177] copyRemoteCerts
	I0916 11:01:39.024043 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:01:39.024136 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.041996 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34673 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 11:01:39.142004 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:01:39.142067 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:01:39.170880 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:01:39.170944 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:01:39.198199 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:01:39.198264 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 11:01:39.225723 1445387 provision.go:87] duration metric: took 608.73435ms to configureAuth
	I0916 11:01:39.225753 1445387 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:01:39.225985 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:39.226097 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.242974 1445387 main.go:141] libmachine: Using SSH client type: native
	I0916 11:01:39.243262 1445387 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34673 <nil> <nil>}
	I0916 11:01:39.243279 1445387 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:01:39.535382 1445387 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:01:39.535461 1445387 machine.go:96] duration metric: took 4.415788387s to provisionDockerMachine
	I0916 11:01:39.535490 1445387 start.go:293] postStartSetup for "ha-334765-m04" (driver="docker")
	I0916 11:01:39.535538 1445387 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:01:39.535661 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:01:39.535781 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.557473 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34673 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 11:01:39.659189 1445387 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:01:39.662743 1445387 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:01:39.662791 1445387 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:01:39.662803 1445387 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:01:39.662811 1445387 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:01:39.662823 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:01:39.662892 1445387 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:01:39.662980 1445387 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:01:39.662992 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:01:39.663096 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:01:39.674268 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:01:39.703181 1445387 start.go:296] duration metric: took 167.641148ms for postStartSetup
	I0916 11:01:39.703297 1445387 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:01:39.703346 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.726624 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34673 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 11:01:39.825987 1445387 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:01:39.830770 1445387 fix.go:56] duration metric: took 5.11506836s for fixHost
	I0916 11:01:39.830796 1445387 start.go:83] releasing machines lock for "ha-334765-m04", held for 5.115124703s
	I0916 11:01:39.830870 1445387 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 11:01:39.855839 1445387 out.go:177] * Found network options:
	I0916 11:01:39.858694 1445387 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 11:01:39.861192 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:01:39.861224 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:01:39.861251 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:01:39.861262 1445387 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:01:39.861340 1445387 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:01:39.861386 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.861680 1445387 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:01:39.861736 1445387 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 11:01:39.888215 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34673 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 11:01:39.889918 1445387 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34673 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 11:01:40.183823 1445387 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:01:40.188455 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:01:40.198210 1445387 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:01:40.198372 1445387 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:01:40.208550 1445387 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:01:40.208580 1445387 start.go:495] detecting cgroup driver to use...
	I0916 11:01:40.208616 1445387 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:01:40.208789 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:01:40.222732 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:01:40.236758 1445387 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:01:40.236824 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:01:40.253415 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:01:40.270244 1445387 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:01:40.366018 1445387 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:01:40.478345 1445387 docker.go:233] disabling docker service ...
	I0916 11:01:40.478416 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:01:40.493252 1445387 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:01:40.505911 1445387 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:01:40.602946 1445387 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:01:40.703172 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:01:40.716523 1445387 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:01:40.735648 1445387 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:01:40.735779 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.747727 1445387 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:01:40.747811 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.758333 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.771410 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.782408 1445387 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:01:40.801563 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.812500 1445387 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.823417 1445387 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:01:40.835436 1445387 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:01:40.844758 1445387 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:01:40.854510 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:40.945097 1445387 ssh_runner.go:195] Run: sudo systemctl restart crio
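The sed invocations above rewrite /etc/crio/crio.conf.d/02-crio.conf in place: pause image, cgroup manager, conmon cgroup, and the default_sysctls list, followed by a daemon-reload and CRI-O restart. A hedged Go sketch of the same key = "value" substitution, shelling out to sed exactly as the log does (setCrioOption is a hypothetical helper, not minikube's own code):

package main

import (
	"fmt"
	"os/exec"
)

// setCrioOption mirrors: sudo sed -i 's|^.*KEY = .*$|KEY = "VALUE"|' CONF
func setCrioOption(conf, key, value string) error {
	expr := fmt.Sprintf(`s|^.*%s = .*$|%s = "%s"|`, key, key, value)
	if out, err := exec.Command("sudo", "sed", "-i", expr, conf).CombinedOutput(); err != nil {
		return fmt.Errorf("sed -i failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	conf := "/etc/crio/crio.conf.d/02-crio.conf"
	for key, value := range map[string]string{
		"pause_image":    "registry.k8s.io/pause:3.10", // value from crio.go:59 above
		"cgroup_manager": "cgroupfs",                   // value from crio.go:70 above
	} {
		if err := setCrioOption(conf, key, value); err != nil {
			panic(err)
		}
	}
}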
	I0916 11:01:41.060237 1445387 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:01:41.060323 1445387 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:01:41.064091 1445387 start.go:563] Will wait 60s for crictl version
	I0916 11:01:41.064180 1445387 ssh_runner.go:195] Run: which crictl
	I0916 11:01:41.068620 1445387 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:01:41.116348 1445387 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:01:41.116508 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:01:41.162097 1445387 ssh_runner.go:195] Run: crio --version
	I0916 11:01:41.203011 1445387 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:01:41.205588 1445387 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 11:01:41.208119 1445387 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 11:01:41.210825 1445387 cli_runner.go:164] Run: docker network inspect ha-334765 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:01:41.229580 1445387 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 11:01:41.234454 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
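The bash one-liner above is minikube's idempotent hosts update: drop any stale host.minikube.internal entry, then append the gateway IP. An equivalent rewrite in Go (illustrative only; the IP and hostname come from the log):

package main

import (
	"os"
	"strings"
)

func main() {
	data, err := os.ReadFile("/etc/hosts")
	if err != nil {
		panic(err)
	}
	// Keep every line except an existing host.minikube.internal entry.
	var kept []string
	for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
		if !strings.HasSuffix(line, "\thost.minikube.internal") {
			kept = append(kept, line)
		}
	}
	kept = append(kept, "192.168.49.1\thost.minikube.internal")
	if err := os.WriteFile("/etc/hosts", []byte(strings.Join(kept, "\n")+"\n"), 0o644); err != nil {
		panic(err)
	}
}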
	I0916 11:01:41.247440 1445387 mustload.go:65] Loading cluster: ha-334765
	I0916 11:01:41.247726 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:41.248030 1445387 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 11:01:41.270451 1445387 host.go:66] Checking if "ha-334765" exists ...
	I0916 11:01:41.270795 1445387 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765 for IP: 192.168.49.5
	I0916 11:01:41.270820 1445387 certs.go:194] generating shared ca certs ...
	I0916 11:01:41.270836 1445387 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:01:41.270974 1445387 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:01:41.271023 1445387 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:01:41.271035 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:01:41.271055 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:01:41.271072 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:01:41.271084 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:01:41.271144 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:01:41.271178 1445387 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:01:41.271191 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:01:41.271224 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:01:41.271255 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:01:41.271281 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:01:41.271342 1445387 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:01:41.271376 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:01:41.271391 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:01:41.271409 1445387 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:41.271430 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:01:41.298868 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:01:41.327279 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:01:41.353943 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:01:41.380472 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:01:41.409064 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:01:41.436421 1445387 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:01:41.463607 1445387 ssh_runner.go:195] Run: openssl version
	I0916 11:01:41.469738 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:01:41.479621 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:01:41.483504 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:01:41.483573 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:01:41.490660 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:01:41.500266 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:01:41.510417 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:01:41.514445 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:01:41.514515 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:01:41.522279 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:01:41.532807 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:01:41.544416 1445387 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:41.548420 1445387 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:41.548494 1445387 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:01:41.555707 1445387 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
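The ls/openssl/ln sequence above installs each CA into the OpenSSL trust directory: `openssl x509 -hash` prints the subject hash (51391683, 3ec20f2e, b5213941 here), and the certificate is then linked as /etc/ssl/certs/<hash>.0. A small sketch of that step (illustrative; run as root, cert path from the log):

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// linkBySubjectHash mirrors the `openssl x509 -hash` + `ln -fs` pair above.
func linkBySubjectHash(pem string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
	if err != nil {
		return err
	}
	hash := strings.TrimSpace(string(out))
	link := fmt.Sprintf("/etc/ssl/certs/%s.0", hash)
	return exec.Command("ln", "-fs", pem, link).Run()
}

func main() {
	if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
		fmt.Println("link failed:", err)
	}
}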
	I0916 11:01:41.565226 1445387 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:01:41.568991 1445387 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:01:41.569038 1445387 kubeadm.go:934] updating node {m04 192.168.49.5 0 v1.31.1  false true} ...
	I0916 11:01:41.569138 1445387 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m04 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.5
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-334765 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:01:41.569211 1445387 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:01:41.578065 1445387 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:01:41.578159 1445387 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:01:41.587161 1445387 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (363 bytes)
	I0916 11:01:41.606367 1445387 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
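The two `scp memory` lines write the unit text rendered at kubeadm.go:946 straight from memory onto the node as a systemd drop-in. A sketch of that write, with the unit body copied from the log (illustrative only):

package main

import "os"

// Unit text as rendered above; --hostname-override and --node-ip
// identify the worker node ha-334765-m04.
const dropIn = `[Unit]
Wants=crio.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=ha-334765-m04 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.5

[Install]
`

func main() {
	path := "/etc/systemd/system/kubelet.service.d/10-kubeadm.conf"
	if err := os.WriteFile(path, []byte(dropIn), 0o644); err != nil {
		panic(err)
	}
}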
	I0916 11:01:41.625530 1445387 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 11:01:41.631462 1445387 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:01:41.646639 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:41.743638 1445387 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:01:41.757051 1445387 start.go:235] Will wait 6m0s for node &{Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 11:01:41.757535 1445387 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:01:41.760153 1445387 out.go:177] * Verifying Kubernetes components...
	I0916 11:01:41.762977 1445387 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:01:41.861464 1445387 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:01:41.877429 1445387 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:01:41.877767 1445387 kapi.go:59] client config for ha-334765: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/ha-334765/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 11:01:41.877833 1445387 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 11:01:41.879288 1445387 node_ready.go:35] waiting up to 6m0s for node "ha-334765-m04" to be "Ready" ...
	I0916 11:01:41.879394 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:01:41.879407 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:41.879416 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:41.879426 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:41.882428 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:41.883190 1445387 node_ready.go:49] node "ha-334765-m04" has status "Ready":"True"
	I0916 11:01:41.883222 1445387 node_ready.go:38] duration metric: took 3.897477ms for node "ha-334765-m04" to be "Ready" ...
	I0916 11:01:41.883233 1445387 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
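Each paired GET below (the pod, then its node) is one iteration of minikube's ~500ms readiness poll against the apiserver at 192.168.49.2:8443, with a 6m0s budget per pod. A minimal client-go sketch of the same wait, assuming client-go v0.27+ for wait.PollUntilContextTimeout (namespace, pod name, and kubeconfig path are taken from the log; this is a sketch, not minikube's pod_ready implementation):

package main

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True, the same
// status check pod_ready.go is logging above.
func podReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Poll every 500ms for up to 6m0s, matching the cadence and budget in the log.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods("kube-system").Get(ctx, "coredns-7c65d6cfc9-q5xr7", metav1.GetOptions{})
			if err != nil {
				return false, nil // treat transient errors as "not ready yet" and keep polling
			}
			return podReady(pod), nil
		})
	if err != nil {
		panic(err)
	}
}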
	I0916 11:01:41.883305 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:01:41.883317 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:41.883326 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:41.883334 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:41.889150 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:41.899286 1445387 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:41.899403 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:41.899416 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:41.899425 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:41.899437 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:41.902485 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:41.903212 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:41.903250 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:41.903260 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:41.903266 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:41.906266 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:42.399721 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:42.399756 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:42.399767 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:42.399772 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:42.403059 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:42.403837 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:42.403853 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:42.403862 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:42.403867 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:42.406666 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:42.899917 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:42.899941 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:42.899951 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:42.899957 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:42.902946 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:42.904289 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:42.904315 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:42.904324 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:42.904332 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:42.908908 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:43.399791 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:43.399813 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:43.399823 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:43.399828 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:43.403028 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:43.404092 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:43.404116 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:43.404127 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:43.404132 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:43.406787 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:43.900390 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:43.900415 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:43.900424 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:43.900431 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:43.903561 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:43.904733 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:43.904775 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:43.904784 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:43.904791 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:43.907565 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:43.909507 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:44.400255 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:44.400276 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:44.400285 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:44.400290 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:44.411405 1445387 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 11:01:44.412298 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:44.412321 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:44.412331 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:44.412337 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:44.415010 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:44.899993 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:44.900015 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:44.900025 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:44.900030 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:44.903203 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:44.904281 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:44.904304 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:44.904315 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:44.904320 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:44.909401 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:45.399843 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:45.399875 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:45.399886 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:45.399892 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:45.403493 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:45.404871 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:45.404892 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:45.404903 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:45.404908 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:45.408000 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:45.899540 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:45.899565 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:45.899581 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:45.899586 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:45.902740 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:45.903571 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:45.903594 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:45.903604 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:45.903608 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:45.906446 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:46.400472 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:46.400505 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:46.400516 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:46.400521 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:46.405514 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:01:46.406385 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:46.406408 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:46.406417 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:46.406423 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:46.409618 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:46.410343 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:46.899851 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:46.899879 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:46.899890 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:46.899895 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:46.902914 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:46.903849 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:46.903868 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:46.903879 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:46.903885 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:46.907064 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:47.400294 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:47.400319 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:47.400329 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:47.400333 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:47.403236 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:47.404196 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:47.404217 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:47.404226 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:47.404231 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:47.406980 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:47.900517 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:47.900542 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:47.900552 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:47.900557 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:47.903547 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:47.904351 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:47.904370 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:47.904380 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:47.904385 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:47.906971 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:48.399597 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:48.399617 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:48.399626 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:48.399631 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:48.407643 1445387 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:01:48.408785 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:48.408803 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:48.408813 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:48.408817 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:48.411633 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:48.412586 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:48.900546 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:48.900566 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:48.900575 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:48.900579 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:48.903454 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:48.904305 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:48.904328 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:48.904338 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:48.904343 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:48.906925 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:49.399522 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:49.399547 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:49.399557 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:49.399563 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:49.402489 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:49.403395 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:49.403416 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:49.403426 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:49.403431 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:49.406160 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:49.899747 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:49.899772 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:49.899782 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:49.899788 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:49.902913 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:49.903709 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:49.903728 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:49.903738 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:49.903742 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:49.906699 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:50.399598 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:50.399624 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:50.399634 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:50.399639 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:50.402579 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:50.403416 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:50.403438 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:50.403449 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:50.403454 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:50.406573 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:50.899876 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:50.899901 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:50.899910 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:50.899914 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:50.903123 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:50.903984 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:50.904009 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:50.904019 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:50.904025 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:50.907035 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:50.907659 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:51.400098 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:51.400122 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:51.400132 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:51.400138 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:51.403078 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:51.404030 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:51.404051 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:51.404062 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:51.404068 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:51.406615 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:51.899533 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:51.899558 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:51.899574 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:51.899586 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:51.902694 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:51.903571 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:51.903626 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:51.903646 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:51.903653 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:51.906333 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:52.399800 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:52.399823 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:52.399832 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:52.399837 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:52.405646 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:52.406956 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:52.407024 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:52.407058 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:52.407076 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:52.414979 1445387 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:01:52.899892 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:52.899921 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:52.899933 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:52.899939 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:52.903163 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:52.904439 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:52.904467 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:52.904477 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:52.904483 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:52.907418 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:52.908054 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:53.399747 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:53.399779 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:53.399789 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:53.399822 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:53.402720 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:53.403603 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:53.403624 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:53.403634 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:53.403667 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:53.406209 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:53.900511 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:53.900532 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:53.900541 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:53.900545 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:53.903488 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:53.904280 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:53.904299 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:53.904308 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:53.904312 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:53.913356 1445387 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:01:54.400003 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:54.400026 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:54.400036 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:54.400041 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:54.403075 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:54.404234 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:54.404257 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:54.404266 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:54.404271 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:54.407189 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:54.899678 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:54.899705 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:54.899724 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:54.899732 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:54.902827 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:54.903716 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:54.903735 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:54.903745 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:54.903768 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:54.906776 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:55.399697 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:55.399783 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:55.399943 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:55.399972 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:55.403652 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:55.404475 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:55.404495 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:55.404505 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:55.404509 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:55.407170 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:55.407810 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:55.900510 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:55.900536 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:55.900546 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:55.900550 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:55.903575 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:55.904515 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:55.904536 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:55.904545 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:55.904549 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:55.907158 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:56.400079 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:56.400142 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:56.400174 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:56.400192 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:56.403749 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:56.404817 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:56.404843 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:56.404853 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:56.404865 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:56.413466 1445387 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:01:56.900351 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:56.900379 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:56.900388 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:56.900392 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:56.903747 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:56.905114 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:56.905146 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:56.905156 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:56.905162 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:56.908106 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:57.399751 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:57.399814 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:57.399839 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:57.399862 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:57.402832 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:57.403520 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:57.403531 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:57.403544 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:57.403548 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:57.406129 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:57.899977 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:57.900000 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:57.900009 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:57.900014 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:57.906795 1445387 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:01:57.907723 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:57.907778 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:57.907802 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:57.907824 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:57.912922 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:01:57.913534 1445387 pod_ready.go:103] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"False"
	I0916 11:01:58.400104 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:58.400141 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:58.400151 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:58.400189 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:58.403838 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:58.405085 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:58.405109 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:58.405120 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:58.405125 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:58.407665 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:58.899549 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:58.899573 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:58.899583 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:58.899590 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:58.902408 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:58.903301 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:58.903320 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:58.903331 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:58.903335 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:58.905924 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.399707 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-q5xr7
	I0916 11:01:59.399729 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.399738 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.399742 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.402844 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:01:59.403630 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:59.403652 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.403662 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.403669 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.406110 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.407069 1445387 pod_ready.go:93] pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:59.407092 1445387 pod_ready.go:82] duration metric: took 17.50776835s for pod "coredns-7c65d6cfc9-q5xr7" in "kube-system" namespace to be "Ready" ...
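[editor's note] The block above is one complete readiness wait: pod_ready.go polls roughly every 500ms, issuing two GETs per round (the pod, then its node), until the pod's PodReady condition is True, with a 6m0s per-pod timeout. A minimal client-go sketch of that pattern, assuming the default kubeconfig path and the pod name from this run; this approximates, not reproduces, minikube's internal helper:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the PodReady condition is True.
func podReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// 500ms interval and 6m0s timeout match the cadence and budget in the log.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods("kube-system").Get(ctx, "coredns-7c65d6cfc9-q5xr7", metav1.GetOptions{})
			if err != nil {
				return false, nil // keep polling on transient errors, as the log does for 404s
			}
			return podReady(pod), nil
		})
	fmt.Println("wait result:", err)
}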
	I0916 11:01:59.407104 1445387 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.407173 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-s9fp9
	I0916 11:01:59.407184 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.407193 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.407204 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.409800 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.410580 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:59.410596 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.410605 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.410610 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.413224 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.413811 1445387 pod_ready.go:93] pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:59.413833 1445387 pod_ready.go:82] duration metric: took 6.72226ms for pod "coredns-7c65d6cfc9-s9fp9" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.413846 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.413907 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765
	I0916 11:01:59.413924 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.413933 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.413945 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.416603 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.417340 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:59.417358 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.417368 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.417389 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.419823 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.420426 1445387 pod_ready.go:93] pod "etcd-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:59.420445 1445387 pod_ready.go:82] duration metric: took 6.59144ms for pod "etcd-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.420456 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.420530 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m02
	I0916 11:01:59.420543 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.420551 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.420554 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.423083 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.423751 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:01:59.423771 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.423781 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.423793 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.426362 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.427006 1445387 pod_ready.go:93] pod "etcd-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:59.427024 1445387 pod_ready.go:82] duration metric: took 6.561057ms for pod "etcd-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.427035 1445387 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.427101 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-334765-m03
	I0916 11:01:59.427115 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.427124 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.427128 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.429551 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:01:59.429744 1445387 pod_ready.go:98] error getting pod "etcd-ha-334765-m03" in "kube-system" namespace (skipping!): pods "etcd-ha-334765-m03" not found
	I0916 11:01:59.429764 1445387 pod_ready.go:82] duration metric: took 2.716847ms for pod "etcd-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:01:59.429775 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "etcd-ha-334765-m03" in "kube-system" namespace (skipping!): pods "etcd-ha-334765-m03" not found
	I0916 11:01:59.429815 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.429894 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765
	I0916 11:01:59.429903 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.429912 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.429921 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.432702 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.600022 1445387 request.go:632] Waited for 166.29322ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:59.600139 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:01:59.600154 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.600168 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.600173 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.602931 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:01:59.603580 1445387 pod_ready.go:93] pod "kube-apiserver-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:01:59.603625 1445387 pod_ready.go:82] duration metric: took 173.797827ms for pod "kube-apiserver-ha-334765" in "kube-system" namespace to be "Ready" ...
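[editor's note] The request.go:632 "Waited ... due to client-side throttling, not priority and fairness" lines that begin here are client-go's own token-bucket rate limiter (by default about 5 QPS with a burst of 10) queuing requests before they leave the process; they are unrelated to the server-side API Priority and Fairness feature, which is exactly what the message disclaims. A minimal sketch of where those knobs live, with hypothetical values:

package main

import (
	"fmt"

	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	// With QPS/Burst unset, client-go falls back to its small default token
	// bucket; once the burst is spent, each request queues and emits the
	// "Waited ... due to client-side throttling" message seen above.
	cfg.QPS = 50    // hypothetical value; raising it shortens the waits
	cfg.Burst = 100 // hypothetical value
	fmt.Printf("host=%s qps=%v burst=%d\n", cfg.Host, cfg.QPS, cfg.Burst)
}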
	I0916 11:01:59.603645 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:01:59.800081 1445387 request.go:632] Waited for 196.332365ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 11:01:59.800142 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m02
	I0916 11:01:59.800149 1445387 round_trippers.go:469] Request Headers:
	I0916 11:01:59.800158 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:01:59.800166 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:01:59.803331 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:00.000463 1445387 request.go:632] Waited for 196.334292ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:00.000533 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:00.000548 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:00.000559 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:00.000567 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:00.013322 1445387 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:02:00.015533 1445387 pod_ready.go:93] pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:00.015559 1445387 pod_ready.go:82] duration metric: took 411.904721ms for pod "kube-apiserver-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:00.015572 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:00.202518 1445387 request.go:632] Waited for 186.832083ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 11:02:00.202603 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-334765-m03
	I0916 11:02:00.202611 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:00.202620 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:00.202628 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:00.226301 1445387 round_trippers.go:574] Response Status: 404 Not Found in 23 milliseconds
	I0916 11:02:00.226456 1445387 pod_ready.go:98] error getting pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-apiserver-ha-334765-m03" not found
	I0916 11:02:00.226502 1445387 pod_ready.go:82] duration metric: took 210.894467ms for pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:02:00.226515 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-apiserver-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-apiserver-ha-334765-m03" not found
	I0916 11:02:00.226526 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:00.399765 1445387 request.go:632] Waited for 173.165268ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 11:02:00.399860 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765
	I0916 11:02:00.399869 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:00.399877 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:00.399890 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:00.405634 1445387 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:02:00.600377 1445387 request.go:632] Waited for 193.320616ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:02:00.600434 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:02:00.600441 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:00.600450 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:00.600460 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:00.603323 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:00.604246 1445387 pod_ready.go:93] pod "kube-controller-manager-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:00.604267 1445387 pod_ready.go:82] duration metric: took 377.733463ms for pod "kube-controller-manager-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:00.604280 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:00.800699 1445387 request.go:632] Waited for 196.318375ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:02:00.800799 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m02
	I0916 11:02:00.800807 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:00.800816 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:00.800822 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:00.803704 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:01.000094 1445387 request.go:632] Waited for 195.311837ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:01.000179 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:01.000240 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:01.000253 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:01.000258 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:01.012294 1445387 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:02:01.012993 1445387 pod_ready.go:93] pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:01.013017 1445387 pod_ready.go:82] duration metric: took 408.708246ms for pod "kube-controller-manager-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:01.013034 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:01.200311 1445387 request.go:632] Waited for 187.159033ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 11:02:01.200382 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-334765-m03
	I0916 11:02:01.200392 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:01.200401 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:01.200406 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:01.203101 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:02:01.203286 1445387 pod_ready.go:98] error getting pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-controller-manager-ha-334765-m03" not found
	I0916 11:02:01.203307 1445387 pod_ready.go:82] duration metric: took 190.260066ms for pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:02:01.203319 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-controller-manager-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-controller-manager-ha-334765-m03" not found
	I0916 11:02:01.203327 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:01.400838 1445387 request.go:632] Waited for 197.385277ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 11:02:01.400924 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4vsvh
	I0916 11:02:01.400935 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:01.400944 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:01.400956 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:01.403649 1445387 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:02:01.403781 1445387 pod_ready.go:98] error getting pod "kube-proxy-4vsvh" in "kube-system" namespace (skipping!): pods "kube-proxy-4vsvh" not found
	I0916 11:02:01.403799 1445387 pod_ready.go:82] duration metric: took 200.465594ms for pod "kube-proxy-4vsvh" in "kube-system" namespace to be "Ready" ...
	E0916 11:02:01.403809 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-proxy-4vsvh" in "kube-system" namespace (skipping!): pods "kube-proxy-4vsvh" not found
	I0916 11:02:01.403817 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:01.600232 1445387 request.go:632] Waited for 196.328828ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:01.600302 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:01.600308 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:01.600317 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:01.600322 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:01.603097 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:01.800399 1445387 request.go:632] Waited for 196.316275ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:01.800474 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:01.800488 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:01.800498 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:01.800504 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:01.803425 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:02.000380 1445387 request.go:632] Waited for 96.296289ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:02.000462 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:02.000469 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:02.000478 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:02.000489 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:02.009658 1445387 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:02:02.200095 1445387 request.go:632] Waited for 189.371194ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:02.200740 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:02.200751 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:02.200760 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:02.200767 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:02.203706 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:02.405817 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:02.405840 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:02.405849 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:02.405854 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:02.408646 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:02.600331 1445387 request.go:632] Waited for 190.802634ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:02.600389 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:02.600395 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:02.600401 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:02.600406 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:02.603385 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:02.904892 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:02.904914 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:02.904924 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:02.904929 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:02.907681 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:03.000650 1445387 request.go:632] Waited for 92.214143ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:03.000753 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:03.000769 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:03.000778 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:03.000784 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:03.007622 1445387 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:02:03.404022 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:03.404044 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:03.404053 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:03.404057 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:03.406860 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:03.407661 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:03.407687 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:03.407697 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:03.407703 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:03.410347 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:03.410897 1445387 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 11:02:03.904811 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:03.904834 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:03.904843 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:03.904847 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:03.907826 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:03.908729 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:03.908750 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:03.908760 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:03.908765 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:03.911729 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:04.404775 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:04.404796 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:04.404806 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:04.404810 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:04.418274 1445387 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 11:02:04.419008 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:04.419053 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:04.419091 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:04.419109 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:04.422383 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:04.904082 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:04.904107 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:04.904116 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:04.904120 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:04.907138 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:04.908063 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:04.908083 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:04.908091 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:04.908098 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:04.910876 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:05.404144 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:05.404168 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:05.404177 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:05.404181 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:05.407283 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:05.408033 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:05.408051 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:05.408061 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:05.408065 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:05.410759 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:05.411325 1445387 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 11:02:05.904041 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:05.904067 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:05.904075 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:05.904080 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:05.907398 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:05.908465 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:05.908487 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:05.908499 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:05.908512 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:05.911554 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:06.404146 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:06.404171 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:06.404179 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:06.404183 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:06.407067 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:06.407786 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:06.407808 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:06.407817 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:06.407822 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:06.410418 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:06.904113 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:06.904160 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:06.904170 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:06.904175 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:06.907303 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:06.907982 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:06.908002 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:06.908011 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:06.908016 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:06.912286 1445387 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:02:07.404563 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:07.404588 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:07.404598 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:07.404604 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:07.407599 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:07.408749 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:07.408789 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:07.408804 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:07.408809 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:07.416946 1445387 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:02:07.417460 1445387 pod_ready.go:103] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"False"
	I0916 11:02:07.904121 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:07.904146 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:07.904155 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:07.904160 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:07.908002 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:07.909073 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:07.909134 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:07.909150 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:07.909157 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:07.911741 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:08.404043 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:08.404122 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:08.404146 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:08.404166 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:08.413211 1445387 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:02:08.413944 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:08.413967 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:08.413986 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:08.413991 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:08.416404 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:08.904904 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:08.904929 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:08.904939 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:08.904945 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:08.908015 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:08.908770 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:08.908790 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:08.908800 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:08.908804 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:08.911406 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.404874 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-br496
	I0916 11:02:09.404899 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.404909 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.404914 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.407625 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.408442 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m04
	I0916 11:02:09.408467 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.408478 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.408484 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.411141 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.412219 1445387 pod_ready.go:93] pod "kube-proxy-br496" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:09.412244 1445387 pod_ready.go:82] duration metric: took 8.008415947s for pod "kube-proxy-br496" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.412256 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.412326 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-l998t
	I0916 11:02:09.412337 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.412356 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.412363 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.415016 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.416173 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:09.416197 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.416207 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.416212 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.418855 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.419718 1445387 pod_ready.go:93] pod "kube-proxy-l998t" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:09.419746 1445387 pod_ready.go:82] duration metric: took 7.479245ms for pod "kube-proxy-l998t" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.419766 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.419839 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-tlfs7
	I0916 11:02:09.419850 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.419858 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.419862 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.422474 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.423192 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:02:09.423215 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.423225 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.423230 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.425680 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.426271 1445387 pod_ready.go:93] pod "kube-proxy-tlfs7" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:09.426290 1445387 pod_ready.go:82] duration metric: took 6.516554ms for pod "kube-proxy-tlfs7" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.426301 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.426368 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765
	I0916 11:02:09.426378 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.426386 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.426393 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.429265 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.429944 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765
	I0916 11:02:09.429965 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.429974 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.429981 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.432983 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.433564 1445387 pod_ready.go:93] pod "kube-scheduler-ha-334765" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:09.433587 1445387 pod_ready.go:82] duration metric: took 7.277125ms for pod "kube-scheduler-ha-334765" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.433600 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.599954 1445387 request.go:632] Waited for 166.28865ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:02:09.600015 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m02
	I0916 11:02:09.600026 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.600050 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.600063 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.602835 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.800232 1445387 request.go:632] Waited for 196.324956ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:09.800290 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-334765-m02
	I0916 11:02:09.800299 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.800314 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.800318 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:09.803291 1445387 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:02:09.803916 1445387 pod_ready.go:93] pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 11:02:09.803943 1445387 pod_ready.go:82] duration metric: took 370.334897ms for pod "kube-scheduler-ha-334765-m02" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.803956 1445387 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	I0916 11:02:09.999763 1445387 request.go:632] Waited for 195.735851ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 11:02:09.999828 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-334765-m03
	I0916 11:02:09.999836 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:09.999844 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:09.999850 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:10.009384 1445387 round_trippers.go:574] Response Status: 404 Not Found in 9 milliseconds
	I0916 11:02:10.010240 1445387 pod_ready.go:98] error getting pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-scheduler-ha-334765-m03" not found
	I0916 11:02:10.010268 1445387 pod_ready.go:82] duration metric: took 206.304159ms for pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace to be "Ready" ...
	E0916 11:02:10.010281 1445387 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-scheduler-ha-334765-m03" in "kube-system" namespace (skipping!): pods "kube-scheduler-ha-334765-m03" not found
	I0916 11:02:10.010292 1445387 pod_ready.go:39] duration metric: took 28.127047374s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:02:10.010308 1445387 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:02:10.010380 1445387 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:02:10.031037 1445387 system_svc.go:56] duration metric: took 20.701977ms WaitForService to wait for kubelet
	I0916 11:02:10.031140 1445387 kubeadm.go:582] duration metric: took 28.273997556s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:02:10.031179 1445387 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:02:10.200461 1445387 request.go:632] Waited for 169.168726ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 11:02:10.200542 1445387 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 11:02:10.200552 1445387 round_trippers.go:469] Request Headers:
	I0916 11:02:10.200562 1445387 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:02:10.200567 1445387 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:02:10.203797 1445387 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:02:10.205158 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:02:10.205205 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:02:10.205217 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:02:10.205222 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:02:10.205227 1445387 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:02:10.205250 1445387 node_conditions.go:123] node cpu capacity is 2
	I0916 11:02:10.205263 1445387 node_conditions.go:105] duration metric: took 174.048479ms to run NodePressure ...
	I0916 11:02:10.205277 1445387 start.go:241] waiting for startup goroutines ...
	I0916 11:02:10.205301 1445387 start.go:255] writing updated cluster config ...
	I0916 11:02:10.205648 1445387 ssh_runner.go:195] Run: rm -f paused
	I0916 11:02:10.213484 1445387 out.go:177] * Done! kubectl is now configured to use "ha-334765" cluster and "default" namespace by default
	E0916 11:02:10.216803 1445387 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
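[editor's note] The final error above, fork/exec /usr/local/bin/kubectl: exec format error, is the same failure that recurs throughout this report: the kernel refuses to execute a binary built for a different architecture, which on this arm64 host is consistent with an amd64 kubectl installed at /usr/local/bin/kubectl. Every kubectl-based assertion in these tests fails for this reason, independent of cluster health. A quick diagnostic sketch, assuming the binary is an ELF executable:

package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

func main() {
	// Path taken from the error message in the log above.
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer f.Close()
	// An arm64 host can natively run EM_AARCH64 binaries; anything else
	// (e.g. EM_X86_64) reproduces the fork/exec "exec format error" here.
	fmt.Printf("binary machine: %v, host arch: %s\n", f.Machine, runtime.GOARCH)
}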
	
	
	==> CRI-O <==
	Sep 16 11:01:19 ha-334765 crio[640]: time="2024-09-16 11:01:19.826169546Z" level=info msg="Started container" PID=1285 containerID=e87dd818abe86ce616366480f82070a45eea884622e81f01c72ac54b5d4984d3 description=kube-system/kube-proxy-tlfs7/kube-proxy id=42cd38da-b218-4b4e-b8a2-9545003727dc name=/runtime.v1.RuntimeService/StartContainer sandboxID=b73863bd0cb405289c4a6e4155907660a1326384daa863943527332b7005bbcb
	Sep 16 11:01:49 ha-334765 conmon[1191]: conmon 0ecb2f9010d5ce52954a <ninfo>: container 1201 exited with status 1
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.992007452Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=f693a70b-5957-470b-b1ed-4d2803526692 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.992231315Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=f693a70b-5957-470b-b1ed-4d2803526692 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.993619293Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=ece150bc-232e-4f88-bb3c-d518ad233e22 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.994152826Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=ece150bc-232e-4f88-bb3c-d518ad233e22 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.994911207Z" level=info msg="Creating container: kube-system/storage-provisioner/storage-provisioner" id=968fc2c4-c962-4617-a62e-f31d48a66e5f name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:01:49 ha-334765 crio[640]: time="2024-09-16 11:01:49.995010659Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:01:50 ha-334765 crio[640]: time="2024-09-16 11:01:50.033997352Z" level=warning msg="Failed to open /etc/passwd: open /var/lib/containers/storage/overlay/e88168b552eaba356bff5fed03dc74208c6297ecb1bc7d3cfddc2fbc384d9f4b/merged/etc/passwd: no such file or directory"
	Sep 16 11:01:50 ha-334765 crio[640]: time="2024-09-16 11:01:50.034079926Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/e88168b552eaba356bff5fed03dc74208c6297ecb1bc7d3cfddc2fbc384d9f4b/merged/etc/group: no such file or directory"
	Sep 16 11:01:50 ha-334765 crio[640]: time="2024-09-16 11:01:50.112922083Z" level=info msg="Created container 2671a6a8eac0a97957bbaf51c39d69a90972e3d182f8909c1c22797bb0f2ead7: kube-system/storage-provisioner/storage-provisioner" id=968fc2c4-c962-4617-a62e-f31d48a66e5f name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:01:50 ha-334765 crio[640]: time="2024-09-16 11:01:50.113803898Z" level=info msg="Starting container: 2671a6a8eac0a97957bbaf51c39d69a90972e3d182f8909c1c22797bb0f2ead7" id=77e50acc-5673-41c3-ad00-11e8bd72e877 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:01:50 ha-334765 crio[640]: time="2024-09-16 11:01:50.126022274Z" level=info msg="Started container" PID=1614 containerID=2671a6a8eac0a97957bbaf51c39d69a90972e3d182f8909c1c22797bb0f2ead7 description=kube-system/storage-provisioner/storage-provisioner id=77e50acc-5673-41c3-ad00-11e8bd72e877 name=/runtime.v1.RuntimeService/StartContainer sandboxID=edde2cdf6f724cf9ae0e3ea0e8f229b82953805d6a763388faa7eb25dd268423
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.118819591Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.146287994Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.146338799Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.146369182Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.171106511Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.171161976Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.171185344Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.212305637Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.212357968Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.212395432Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.292505962Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:02:00 ha-334765 crio[640]: time="2024-09-16 11:02:00.292549916Z" level=info msg="Updated default CNI network name to kindnet"
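[editor's note] The "CNI monitoring event" lines above show CRI-O watching /etc/cni/net.d with inotify: kindnet writes 10-kindnet.conflist.temp and renames it into place, so CREATE, WRITE, and RENAME each fire and each triggers a re-scan that re-resolves the default network. A minimal sketch of the same watch pattern using fsnotify (an assumption for illustration, not CRI-O's actual code):

package main

import (
	"log"

	"github.com/fsnotify/fsnotify"
)

func main() {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	if err := w.Add("/etc/cni/net.d"); err != nil {
		log.Fatal(err)
	}
	for {
		select {
		case ev := <-w.Events:
			// kindnet's temp-write-then-rename arrives as CREATE, WRITE, RENAME;
			// a runtime would re-scan the directory for the default network here.
			log.Printf("CNI monitoring event %q: %s", ev.Name, ev.Op)
		case err := <-w.Errors:
			log.Println("watch error:", err)
		}
	}
}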
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	2671a6a8eac0a       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   22 seconds ago       Running             storage-provisioner       4                   edde2cdf6f724       storage-provisioner
	b884c82382d5f       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   52 seconds ago       Running             coredns                   2                   80916d2a0994d       coredns-7c65d6cfc9-s9fp9
	e87dd818abe86       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   52 seconds ago       Running             kube-proxy                2                   b73863bd0cb40       kube-proxy-tlfs7
	f9e23d0bff5c2       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   52 seconds ago       Running             coredns                   2                   0245de2a7153f       coredns-7c65d6cfc9-q5xr7
	d1ef6aaed630f       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   52 seconds ago       Running             kindnet-cni               2                   fea71009f67aa       kindnet-7s5t5
	0a2ba2682ed38       89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd   52 seconds ago       Running             busybox                   1                   310d162e6b928       busybox-7dff88458-55czh
	0ecb2f9010d5c       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   52 seconds ago       Exited              storage-provisioner       3                   edde2cdf6f724       storage-provisioner
	b65d39ab2f00b       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   About a minute ago   Running             kube-controller-manager   5                   6b8bc8651c7f8       kube-controller-manager-ha-334765
	865d4d6176fce       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   About a minute ago   Running             kube-apiserver            3                   fac37510c0afc       kube-apiserver-ha-334765
	b06b6813943f1       7e2a4e229620ba3a757dc3699d10e8f77c453b7ee71936521668dec51669679d   About a minute ago   Running             kube-vip                  2                   b73b953c83bfc       kube-vip-ha-334765
	4b7e9ee825d63       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   About a minute ago   Running             etcd                      2                   beaab6f4a782d       etcd-ha-334765
	03344d326ab99       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   About a minute ago   Running             kube-scheduler            2                   4497e75cb0084       kube-scheduler-ha-334765
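
The table above is the CRI-level container inventory; note storage-provisioner is on attempt 4 with attempt 3 Exited, while everything else restarted cleanly about a minute earlier. Assuming shell access to the minikube node (profile name taken from this report), the same view can be reproduced with:

    $ minikube -p ha-334765 ssh
    $ sudo crictl ps -a     # all containers, including exited attempts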
	
	
	==> coredns [b884c82382d5f4f02f7757f5f57afede2316f52fdf90ef7e4eed5fa9d0a3e702] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:52676 - 5349 "HINFO IN 5425872935274350068.4846411717927184002. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.013030563s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1642664982]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.741) (total time: 30001ms):
	Trace[1642664982]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:01:49.742)
	Trace[1642664982]: [30.001066781s] [30.001066781s] END
	[INFO] plugin/kubernetes: Trace[694372133]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.742) (total time: 30000ms):
	Trace[694372133]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:01:49.742)
	Trace[694372133]: [30.000829388s] [30.000829388s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1867520705]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.742) (total time: 30002ms):
	Trace[1867520705]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:01:49.742)
	Trace[1867520705]: [30.002105253s] [30.002105253s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [f9e23d0bff5c2b6532ebde8a5da049e0cfcea882732eede7f22a13ad73d1c45b] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:43311 - 28321 "HINFO IN 3130558305314471328.3215134934352714368. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.022188528s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[2060894607]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.991) (total time: 30007ms):
	Trace[2060894607]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30005ms (11:01:49.996)
	Trace[2060894607]: [30.00782027s] [30.00782027s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[96827324]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.991) (total time: 30007ms):
	Trace[96827324]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30007ms (11:01:49.999)
	Trace[96827324]: [30.007816308s] [30.007816308s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1878954913]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:01:19.991) (total time: 30008ms):
	Trace[1878954913]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30005ms (11:01:49.997)
	Trace[1878954913]: [30.008257535s] [30.008257535s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
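
Both CoreDNS replicas show the same pattern: roughly 30s of "dial tcp 10.96.0.1:443: i/o timeout" starting at 11:01:19, then recovery, which lines up with the apiserver only finishing cache sync at 11:01:14 and the service network converging shortly after. A quick sanity check that 10.96.0.1 is the in-cluster apiserver VIP and has live backends (context name assumed to match the profile):

    $ kubectl --context ha-334765 get svc kubernetes -o wide
    $ kubectl --context ha-334765 get endpointslices -l kubernetes.io/service-name=kubernetes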
	
	
	==> describe nodes <==
	Name:               ha-334765
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_56_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:02:10 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:01:19 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:01:19 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:01:19 +0000   Mon, 16 Sep 2024 10:51:52 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:01:19 +0000   Mon, 16 Sep 2024 10:52:40 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-334765
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 5bcff762aa9944059aab1b0de78bd0a6
	  System UUID:                15c23ccf-7aa3-4a1a-8aeb-2a833bffb1e5
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-55czh              0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m22s
	  kube-system                 coredns-7c65d6cfc9-q5xr7             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     10m
	  kube-system                 coredns-7c65d6cfc9-s9fp9             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     10m
	  kube-system                 etcd-ha-334765                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         10m
	  kube-system                 kindnet-7s5t5                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      10m
	  kube-system                 kube-apiserver-ha-334765             250m (12%)    0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-controller-manager-ha-334765    200m (10%)    0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-proxy-tlfs7                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-scheduler-ha-334765             100m (5%)     0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-vip-ha-334765                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m12s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         10m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 10m                  kube-proxy       
	  Normal   Starting                 52s                  kube-proxy       
	  Normal   Starting                 4m11s                kube-proxy       
	  Normal   NodeHasSufficientPID     10m (x7 over 10m)    kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   Starting                 10m                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 10m                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  10m (x8 over 10m)    kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    10m (x8 over 10m)    kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Warning  CgroupV1                 10m                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasNoDiskPressure    10m                  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     10m                  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  10m                  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   Starting                 10m                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           10m                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           9m43s                node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   NodeReady                9m32s                kubelet          Node ha-334765 status is now: NodeReady
	  Normal   RegisteredNode           8m35s                node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           5m29s                node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   NodeHasNoDiskPressure    5m2s (x8 over 5m2s)  kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m2s (x7 over 5m2s)  kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  5m2s (x8 over 5m2s)  kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   Starting                 5m2s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 5m2s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           4m20s                node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           3m27s                node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           3m8s                 node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   Starting                 81s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 81s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  81s (x8 over 81s)    kubelet          Node ha-334765 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    81s (x8 over 81s)    kubelet          Node ha-334765 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     81s (x7 over 81s)    kubelet          Node ha-334765 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           55s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	  Normal   RegisteredNode           48s                  node-controller  Node ha-334765 event: Registered Node ha-334765 in Controller
	
	
	Name:               ha-334765-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_23_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:19 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:02:06 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:01:15 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:01:15 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:01:15 +0000   Mon, 16 Sep 2024 10:52:19 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:01:15 +0000   Mon, 16 Sep 2024 10:53:02 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-334765-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 26d42c4999594e8299e7aeb79bd64269
	  System UUID:                aea91ea0-3fb3-4815-9747-a2bcb9506f24
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-tczms                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m55s
	  kube-system                 etcd-ha-334765-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         9m52s
	  kube-system                 kindnet-vj27j                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      9m53s
	  kube-system                 kube-apiserver-ha-334765-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         9m52s
	  kube-system                 kube-controller-manager-ha-334765-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         9m52s
	  kube-system                 kube-proxy-l998t                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m53s
	  kube-system                 kube-scheduler-ha-334765-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         9m52s
	  kube-system                 kube-vip-ha-334765-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m49s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 9m45s                  kube-proxy       
	  Normal   Starting                 43s                    kube-proxy       
	  Normal   Starting                 4m5s                   kube-proxy       
	  Normal   NodeHasSufficientMemory  9m53s (x8 over 9m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasSufficientPID     9m53s (x7 over 9m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    9m53s (x8 over 9m53s)  kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           9m50s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           9m43s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           8m35s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   NodeHasSufficientPID     6m2s (x7 over 6m2s)    kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    6m2s (x8 over 6m2s)    kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  6m2s (x8 over 6m2s)    kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   Starting                 6m2s                   kubelet          Starting kubelet.
	  Warning  CgroupV1                 6m2s                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           5m29s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   NodeHasNoDiskPressure    5m (x7 over 5m)        kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m (x7 over 5m)        kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  5m (x9 over 5m)        kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   Starting                 5m                     kubelet          Starting kubelet.
	  Warning  CgroupV1                 5m                     kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           4m20s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           3m27s                  node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           3m8s                   node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   Starting                 79s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 79s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  78s (x8 over 79s)      kubelet          Node ha-334765-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    78s (x8 over 79s)      kubelet          Node ha-334765-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     78s (x7 over 79s)      kubelet          Node ha-334765-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           55s                    node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	  Normal   RegisteredNode           48s                    node-controller  Node ha-334765-m02 event: Registered Node ha-334765-m02 in Controller
	
	
	Name:               ha-334765-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-334765-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-334765
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_43_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-334765-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:02:08 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:01:48 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:01:48 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:01:48 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:01:48 +0000   Mon, 16 Sep 2024 10:59:32 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-334765-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 b42a4cde7ef246f9bdf59344f91cf8d7
	  System UUID:                2ce236e7-eff0-4b96-a330-3e2c709a50e7
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-2n2c7    0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m22s
	  kube-system                 kindnet-plxdg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m30s
	  kube-system                 kube-proxy-br496           0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m30s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 2m24s                  kube-proxy       
	  Normal   Starting                 3s                     kube-proxy       
	  Normal   Starting                 7m27s                  kube-proxy       
	  Normal   RegisteredNode           7m30s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeHasNoDiskPressure    7m30s (x2 over 7m30s)  kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  7m30s (x2 over 7m30s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Warning  CgroupV1                 7m30s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     7m30s (x2 over 7m30s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           7m27s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           7m25s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeReady                6m47s                  kubelet          Node ha-334765-m04 status is now: NodeReady
	  Normal   RegisteredNode           5m29s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           4m20s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   NodeNotReady             3m40s                  node-controller  Node ha-334765-m04 status is now: NodeNotReady
	  Normal   RegisteredNode           3m27s                  node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           3m8s                   node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   Starting                 2m53s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m53s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     2m47s (x7 over 2m53s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    2m40s (x8 over 2m53s)  kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  2m40s (x8 over 2m53s)  kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           55s                    node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   RegisteredNode           48s                    node-controller  Node ha-334765-m04 event: Registered Node ha-334765-m04 in Controller
	  Normal   Starting                 37s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 37s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     30s (x7 over 37s)      kubelet          Node ha-334765-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    24s (x8 over 37s)      kubelet          Node ha-334765-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  24s (x8 over 37s)      kubelet          Node ha-334765-m04 status is now: NodeHasSufficientMemory
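
Only ha-334765, ha-334765-m02 and ha-334765-m04 appear in the node inventory; ha-334765-m03 is absent, consistent with the secondary-node deletion exercised in this run. All three nodes also share Boot ID 34b2555f-ef29-4c31-9b47-b3b930bd3b4b, as expected for docker-driver "nodes" that are containers on a single host. The section above is plain describe output and can be regenerated with (context name assumed to match the profile):

    $ kubectl --context ha-334765 describe nodes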
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [4b7e9ee825d63c840b3f61fbc305fbb4048c47401f3d449ac4a17e01901b204a] <==
	{"level":"info","ts":"2024-09-16T11:01:12.430883Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"1ee24603fd50eda8"}
	{"level":"info","ts":"2024-09-16T11:01:12.468081Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 5"}
	{"level":"info","ts":"2024-09-16T11:01:12.468137Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 5"}
	{"level":"info","ts":"2024-09-16T11:01:12.468153Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 5"}
	{"level":"info","ts":"2024-09-16T11:01:12.468171Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 4, index: 3184] sent MsgPreVote request to 1ee24603fd50eda8 at term 5"}
	{"level":"info","ts":"2024-09-16T11:01:12.468687Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from 1ee24603fd50eda8 at term 5"}
	{"level":"info","ts":"2024-09-16T11:01:12.468713Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc has received 2 MsgPreVoteResp votes and 0 vote rejections"}
	{"level":"info","ts":"2024-09-16T11:01:12.468726Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.468733Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.468744Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 4, index: 3184] sent MsgVote request to 1ee24603fd50eda8 at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.472947Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from 1ee24603fd50eda8 at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.472986Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc has received 2 MsgVoteResp votes and 0 vote rejections"}
	{"level":"info","ts":"2024-09-16T11:01:12.473001Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.473012Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 6"}
	{"level":"info","ts":"2024-09-16T11:01:12.477490Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:01:12.477468Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:ha-334765 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:01:12.478687Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:01:12.479652Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T11:01:12.480643Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:01:12.481796Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:01:12.483019Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:01:12.487529Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:01:12.487635Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"warn","ts":"2024-09-16T11:01:14.564727Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"1ee24603fd50eda8","rtt":"0s","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T11:01:14.564871Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"1ee24603fd50eda8","rtt":"0s","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	
	
	==> kernel <==
	 11:02:12 up 10:44,  0 users,  load average: 4.51, 3.75, 2.71
	Linux ha-334765 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [d1ef6aaed630f1f829ed26f68fd9428ecfe8d5ccb7ae6953014db47013576607] <==
	Trace[2037494650]: [30.002455475s] [30.002455475s] END
	E0916 11:01:50.121226       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 11:01:50.119423       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:01:50.121337       1 trace.go:236] Trace[1970481888]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 11:01:20.117) (total time: 30003ms):
	Trace[1970481888]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:01:50.119)
	Trace[1970481888]: [30.00367489s] [30.00367489s] END
	E0916 11:01:50.121371       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	E0916 11:01:50.121422       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:01:51.618583       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 11:01:51.618614       1 metrics.go:61] Registering metrics
	I0916 11:01:51.618682       1 controller.go:374] Syncing nftables rules
	I0916 11:02:00.117761       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 11:02:00.117812       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
	I0916 11:02:00.117961       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.1.0/24 Src: <nil> Gw: 192.168.49.3 Flags: [] Table: 0} 
	I0916 11:02:00.118042       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 11:02:00.118051       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 11:02:00.118091       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.3.0/24 Src: <nil> Gw: 192.168.49.5 Flags: [] Table: 0} 
	I0916 11:02:00.118126       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 11:02:00.118133       1 main.go:299] handling current node
	I0916 11:02:10.118305       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 11:02:10.118420       1 main.go:322] Node ha-334765-m04 has CIDR [10.244.3.0/24] 
	I0916 11:02:10.118555       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 11:02:10.118592       1 main.go:299] handling current node
	I0916 11:02:10.118647       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 11:02:10.118680       1 main.go:322] Node ha-334765-m02 has CIDR [10.244.1.0/24] 
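
kindnet is doing its normal per-node routing: one route per remote PodCIDR via that node's InternalIP, now only 10.244.1.0/24 via 192.168.49.3 (m02) and 10.244.3.0/24 via 192.168.49.5 (m04) since m03 was removed. On the node this should materialize as kernel routes (the interface name below is an assumption):

    $ minikube -p ha-334765 ssh -- ip route show
    # expect, per the log above:
    #   10.244.1.0/24 via 192.168.49.3 dev eth0
    #   10.244.3.0/24 via 192.168.49.5 dev eth0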
	
	
	==> kube-apiserver [865d4d6176fce2805e80e10a7713a27c370c11e1075a45ebdd94aa8ffdd3968f] <==
	I0916 11:01:14.463728       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 11:01:14.463740       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 11:01:14.849358       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:01:14.860796       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:01:14.860830       1 policy_source.go:224] refreshing policies
	I0916 11:01:14.861617       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:01:14.861979       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:01:14.862004       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:01:14.862039       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:01:14.862058       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:01:14.863606       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:01:14.863647       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:01:14.863717       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:01:14.871308       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:01:14.871337       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:01:14.871345       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:01:14.871350       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:01:14.872576       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 11:01:14.889445       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	W0916 11:01:14.896250       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.3]
	I0916 11:01:14.898107       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:01:14.933729       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	E0916 11:01:14.939060       1 controller.go:95] Found stale data, removed previous endpoints on kubernetes service, apiserver didn't exit successfully previously
	I0916 11:01:15.473358       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 11:01:15.856119       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2 192.168.49.3]
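
The two "Resetting endpoints for master service" lines show the endpoint reconciler first seeing only m02 and then re-adding this node, so default/kubernetes should end up with exactly the two surviving control-plane IPs. To confirm (apiserver port 8443 is the minikube default, assumed here):

    $ kubectl --context ha-334765 get endpoints kubernetes -n default
    # expect: 192.168.49.2:8443,192.168.49.3:8443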
	
	
	==> kube-controller-manager [b65d39ab2f00b83c2b22e3284f1da01f3186002eb12baab15b1232593cef6c06] <==
	I0916 11:01:55.065148       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="49.361µs"
	E0916 11:01:57.821492       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:01:57.821525       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:01:57.821534       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:01:57.821540       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	E0916 11:01:57.821547       1 gc_controller.go:151] "Failed to get node" err="node \"ha-334765-m03\" not found" logger="pod-garbage-collector-controller" node="ha-334765-m03"
	I0916 11:01:57.836410       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-proxy-4vsvh"
	I0916 11:01:57.874610       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-proxy-4vsvh"
	I0916 11:01:57.874730       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kindnet-rfw69"
	I0916 11:01:57.914647       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kindnet-rfw69"
	I0916 11:01:57.914837       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-apiserver-ha-334765-m03"
	I0916 11:01:57.953724       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-apiserver-ha-334765-m03"
	I0916 11:01:57.953749       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-controller-manager-ha-334765-m03"
	I0916 11:01:57.996132       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-controller-manager-ha-334765-m03"
	I0916 11:01:57.996172       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-scheduler-ha-334765-m03"
	I0916 11:01:58.036114       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-scheduler-ha-334765-m03"
	I0916 11:01:58.036244       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/etcd-ha-334765-m03"
	I0916 11:01:58.080486       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/etcd-ha-334765-m03"
	I0916 11:01:58.080519       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-vip-ha-334765-m03"
	I0916 11:01:58.111961       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-vip-ha-334765-m03"
	I0916 11:01:59.198005       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.607432ms"
	I0916 11:01:59.227128       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="29.068896ms"
	I0916 11:01:59.227262       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="90.296µs"
	I0916 11:02:06.142848       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="36.635113ms"
	I0916 11:02:06.143046       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="52.043µs"
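
The "Failed to get node ha-334765-m03" errors are PodGC churning through pods still bound to the deleted node; each is force-deleted in turn, and the errors should stop once the orphans are gone. To verify nothing stays pinned to the removed node:

    $ kubectl --context ha-334765 get pods -A -o wide \
        --field-selector spec.nodeName=ha-334765-m03
    # "No resources found" once PodGC has finished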
	
	
	==> kube-proxy [e87dd818abe86ce616366480f82070a45eea884622e81f01c72ac54b5d4984d3] <==
	I0916 11:01:20.008797       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:01:20.341084       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 11:01:20.341283       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:01:20.473323       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:01:20.473457       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:01:20.483471       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:01:20.483917       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:01:20.486787       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:01:20.488048       1 config.go:199] "Starting service config controller"
	I0916 11:01:20.488082       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:01:20.488109       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:01:20.488114       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:01:20.498861       1 config.go:328] "Starting node config controller"
	I0916 11:01:20.499017       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:01:20.588284       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:01:20.588401       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:01:20.602989       1 shared_informer.go:320] Caches are synced for node config
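
kube-proxy came up in iptables mode and synced all three caches well under a second; the nodePortAddresses warning is informational (NodePorts bind all local IPs unless --nodeport-addresses primary is set, as the message itself suggests). The iptables programming can be spot-checked on the node:

    $ minikube -p ha-334765 ssh -- sudo iptables -t nat -L KUBE-SERVICES -n | head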
	
	
	==> kube-scheduler [03344d326ab99e3b385323b358751e20ef0f3ae3955611a35b8ba2dd7e2b01ff] <==
	I0916 11:01:02.078375       1 serving.go:386] Generated self-signed cert in-memory
	W0916 11:01:14.653314       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 11:01:14.653357       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 11:01:14.653368       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 11:01:14.653377       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 11:01:14.814842       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:01:14.815951       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:01:14.818841       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:01:14.819154       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:01:14.819179       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:01:14.819783       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:01:14.929329       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
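
The scheduler started about 12s before the apiserver answered, so the one-time extension-apiserver-authentication lookup failure and the anonymous-requests warning are startup noise; it recovered once caches synced at 11:01:14. Were the error persistent, the log's own suggested fix is a rolebinding along these lines (the binding name and subject below are illustrative placeholders, not values from this cluster):

    $ kubectl -n kube-system create rolebinding scheduler-authn-reader \
        --role=extension-apiserver-authentication-reader \
        --serviceaccount=kube-system:kube-scheduler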
	
	
	==> kubelet <==
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895038     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/e1832b94-ac8f-43c0-af10-ddc6afbb229b-xtables-lock\") pod \"kindnet-7s5t5\" (UID: \"e1832b94-ac8f-43c0-af10-ddc6afbb229b\") " pod="kube-system/kindnet-7s5t5"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895087     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/4db2490d-9707-4734-973b-adac5570e275-tmp\") pod \"storage-provisioner\" (UID: \"4db2490d-9707-4734-973b-adac5570e275\") " pod="kube-system/storage-provisioner"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895120     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/e1832b94-ac8f-43c0-af10-ddc6afbb229b-cni-cfg\") pod \"kindnet-7s5t5\" (UID: \"e1832b94-ac8f-43c0-af10-ddc6afbb229b\") " pod="kube-system/kindnet-7s5t5"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895174     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6a873882-8023-44b5-82d9-2f18e70f8ef1-lib-modules\") pod \"kube-proxy-tlfs7\" (UID: \"6a873882-8023-44b5-82d9-2f18e70f8ef1\") " pod="kube-system/kube-proxy-tlfs7"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895206     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e1832b94-ac8f-43c0-af10-ddc6afbb229b-lib-modules\") pod \"kindnet-7s5t5\" (UID: \"e1832b94-ac8f-43c0-af10-ddc6afbb229b\") " pod="kube-system/kindnet-7s5t5"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.895253     756 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/6a873882-8023-44b5-82d9-2f18e70f8ef1-xtables-lock\") pod \"kube-proxy-tlfs7\" (UID: \"6a873882-8023-44b5-82d9-2f18e70f8ef1\") " pod="kube-system/kube-proxy-tlfs7"
	Sep 16 11:01:18 ha-334765 kubelet[756]: I0916 11:01:18.961199     756 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:01:19 ha-334765 kubelet[756]: I0916 11:01:19.662869     756 kubelet_node_status.go:72] "Attempting to register node" node="ha-334765"
	Sep 16 11:01:19 ha-334765 kubelet[756]: I0916 11:01:19.804164     756 kubelet_node_status.go:111] "Node was previously registered" node="ha-334765"
	Sep 16 11:01:19 ha-334765 kubelet[756]: I0916 11:01:19.804267     756 kubelet_node_status.go:75] "Successfully registered node" node="ha-334765"
	Sep 16 11:01:19 ha-334765 kubelet[756]: I0916 11:01:19.804313     756 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:01:19 ha-334765 kubelet[756]: I0916 11:01:19.805124     756 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:01:21 ha-334765 kubelet[756]: E0916 11:01:21.861755     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484481861111875,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:21 ha-334765 kubelet[756]: E0916 11:01:21.861803     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484481861111875,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:31 ha-334765 kubelet[756]: E0916 11:01:31.863125     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484491862876657,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:31 ha-334765 kubelet[756]: E0916 11:01:31.863164     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484491862876657,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:41 ha-334765 kubelet[756]: E0916 11:01:41.864932     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484501864362515,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:41 ha-334765 kubelet[756]: E0916 11:01:41.864972     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484501864362515,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:49 ha-334765 kubelet[756]: I0916 11:01:49.989883     756 scope.go:117] "RemoveContainer" containerID="0ecb2f9010d5ce52954ade3c53648c4679af403952607192fd110ee7ddda57ac"
	Sep 16 11:01:51 ha-334765 kubelet[756]: E0916 11:01:51.867980     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484511867715292,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:01:51 ha-334765 kubelet[756]: E0916 11:01:51.878664     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484511867715292,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:02:01 ha-334765 kubelet[756]: E0916 11:02:01.880787     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484521880522062,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:02:01 ha-334765 kubelet[756]: E0916 11:02:01.880823     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484521880522062,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:02:11 ha-334765 kubelet[756]: E0916 11:02:11.882492     756 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484531882279856,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:02:11 ha-334765 kubelet[756]: E0916 11:02:11.882533     756 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726484531882279856,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:147135,},InodesUsed:&UInt64Value{Value:69,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-334765 -n ha-334765
helpers_test.go:261: (dbg) Run:  kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (572.884µs)
helpers_test.go:263: kubectl --context ha-334765 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/RestartCluster (90.15s)
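Every kubectl invocation in this report fails the same way: "fork/exec /usr/local/bin/kubectl: exec format error". On Linux that error (ENOEXEC) means the binary's format does not match the host, here most likely an amd64 kubectl on this arm64 machine. A minimal Go sketch of the check (the path comes from the log above; this is not part of the test suite):

	package main

	import (
		"debug/elf"
		"fmt"
		"runtime"
	)

	func main() {
		// Read the ELF header of the binary the tests tried to exec.
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			fmt.Println("not a readable ELF binary:", err)
			return
		}
		defer f.Close()
		// An arm64 host needs EM_AARCH64; an amd64 kubectl reports EM_X86_64,
		// and execve on it fails with ENOEXEC ("exec format error").
		fmt.Printf("binary machine: %v, host arch: %s\n", f.Machine, runtime.GOARCH)
	}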

x
+
TestMultiNode/serial/MultiNodeLabels (2.97s)

=== RUN   TestMultiNode/serial/MultiNodeLabels
multinode_test.go:221: (dbg) Run:  kubectl --context multinode-654612 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
multinode_test.go:221: (dbg) Non-zero exit: kubectl --context multinode-654612 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]": fork/exec /usr/local/bin/kubectl: exec format error (536.84µs)
multinode_test.go:223: failed to 'kubectl get nodes' with args "kubectl --context multinode-654612 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": fork/exec /usr/local/bin/kubectl: exec format error
multinode_test.go:230: failed to decode json from label list: args "kubectl --context multinode-654612 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": unexpected end of JSON input
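The json decode error at multinode_test.go:230 follows mechanically from the first failure: kubectl never executed, so the harness had zero bytes to decode, and Go's encoding/json returns exactly this message for empty input. A standalone illustration (not the test's own code):

	package main

	import (
		"encoding/json"
		"fmt"
	)

	func main() {
		// With no kubectl output there is nothing to parse; unmarshalling an
		// empty byte slice reproduces the message in the log verbatim.
		var labels []map[string]string
		err := json.Unmarshal([]byte(""), &labels)
		fmt.Println(err) // unexpected end of JSON input
	}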
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/MultiNodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-654612
helpers_test.go:235: (dbg) docker inspect multinode-654612:

-- stdout --
	[
	    {
	        "Id": "402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd",
	        "Created": "2024-09-16T11:09:45.282229543Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1489022,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:09:45.436723255Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hostname",
	        "HostsPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hosts",
	        "LogPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd-json.log",
	        "Name": "/multinode-654612",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-654612:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-654612",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/merged",
	                "UpperDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/diff",
	                "WorkDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-654612",
	                "Source": "/var/lib/docker/volumes/multinode-654612/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-654612",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-654612",
	                "name.minikube.sigs.k8s.io": "multinode-654612",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "e9bdb8a96f941d3fb83485ff41db347cd0ca4b8ec9abe479b77dc1f947540d87",
	            "SandboxKey": "/var/run/docker/netns/e9bdb8a96f94",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34738"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34739"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34742"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34740"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34741"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-654612": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.67.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:43:02",
	                    "DriverOpts": null,
	                    "NetworkID": "76703dbf7b5c303b888ff80e924d3dab5e1ece3140da60ee94903d5d35e68013",
	                    "EndpointID": "55b225e9580b23d8f9c99debd0c6695a2fd84eb21d47b2088c34127e4a33c4a8",
	                    "Gateway": "192.168.67.1",
	                    "IPAddress": "192.168.67.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-654612",
	                        "402497514f0b"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
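Note the empty "HostPort" values under PortBindings in the inspect output: Docker assigns ephemeral host ports at container start, and the resolved values appear under NetworkSettings.Ports (22/tcp -> 34738, 8443/tcp -> 34741, and so on). A sketch of resolving one of them from Go, reusing the inspect template that minikube itself runs later in this log (assumes a local Docker CLI):

	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	func main() {
		// Same Go template minikube uses below to find the node's SSH port.
		format := `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
		out, err := exec.Command("docker", "container", "inspect", "-f", format,
			"multinode-654612").Output()
		if err != nil {
			fmt.Println("inspect failed:", err)
			return
		}
		fmt.Println(strings.TrimSpace(string(out))) // 34738 for the container above
	}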
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-654612 -n multinode-654612
helpers_test.go:244: <<< TestMultiNode/serial/MultiNodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/MultiNodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 logs -n 25: (1.607196472s)
helpers_test.go:252: TestMultiNode/serial/MultiNodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |                       Args                        |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| ssh     | mount-start-2-436346 ssh -- ls                    | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| delete  | -p mount-start-1-434131                           | mount-start-1-434131 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | --alsologtostderr -v=5                            |                      |         |         |                     |                     |
	| ssh     | mount-start-2-436346 ssh -- ls                    | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| stop    | -p mount-start-2-436346                           | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| start   | -p mount-start-2-436346                           | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| ssh     | mount-start-2-436346 ssh -- ls                    | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| delete  | -p mount-start-2-436346                           | mount-start-2-436346 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| delete  | -p mount-start-1-434131                           | mount-start-1-434131 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| start   | -p multinode-654612                               | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:11 UTC |
	|         | --wait=true --memory=2200                         |                      |         |         |                     |                     |
	|         | --nodes=2 -v=8                                    |                      |         |         |                     |                     |
	|         | --alsologtostderr                                 |                      |         |         |                     |                     |
	|         | --driver=docker                                   |                      |         |         |                     |                     |
	|         | --container-runtime=crio                          |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- apply -f                   | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- rollout                    | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | status deployment/busybox                         |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- get pods -o                | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- get pods -o                | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-rdtjw --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-sfkxt --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-rdtjw --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-sfkxt --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-rdtjw -- nslookup               |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-sfkxt -- nslookup               |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- get pods -o                | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-rdtjw                           |                      |         |         |                     |                     |
	|         | -- sh -c nslookup                                 |                      |         |         |                     |                     |
	|         | host.minikube.internal | awk                      |                      |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-rdtjw -- sh                     |                      |         |         |                     |                     |
	|         | -c ping -c 1 192.168.67.1                         |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-sfkxt                           |                      |         |         |                     |                     |
	|         | -- sh -c nslookup                                 |                      |         |         |                     |                     |
	|         | host.minikube.internal | awk                      |                      |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                      |         |         |                     |                     |
	| kubectl | -p multinode-654612 -- exec                       | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | busybox-7dff88458-sfkxt -- sh                     |                      |         |         |                     |                     |
	|         | -c ping -c 1 192.168.67.1                         |                      |         |         |                     |                     |
	| node    | add -p multinode-654612 -v 3                      | multinode-654612     | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:12 UTC |
	|         | --alsologtostderr                                 |                      |         |         |                     |                     |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:09:39
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:09:39.863368 1488539 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:09:39.863515 1488539 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:39.863527 1488539 out.go:358] Setting ErrFile to fd 2...
	I0916 11:09:39.863532 1488539 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:39.863795 1488539 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:09:39.864221 1488539 out.go:352] Setting JSON to false
	I0916 11:09:39.865166 1488539 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":39125,"bootTime":1726445855,"procs":181,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:09:39.865239 1488539 start.go:139] virtualization:  
	I0916 11:09:39.868551 1488539 out.go:177] * [multinode-654612] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:09:39.872083 1488539 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:09:39.872162 1488539 notify.go:220] Checking for updates...
	I0916 11:09:39.877390 1488539 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:09:39.880030 1488539 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:09:39.882656 1488539 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:09:39.885306 1488539 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:09:39.887961 1488539 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:09:39.890832 1488539 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:09:39.916693 1488539 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:09:39.916927 1488539 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:39.974976 1488539 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:09:39.964987506 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:39.975089 1488539 docker.go:318] overlay module found
	I0916 11:09:39.977984 1488539 out.go:177] * Using the docker driver based on user configuration
	I0916 11:09:39.980581 1488539 start.go:297] selected driver: docker
	I0916 11:09:39.980603 1488539 start.go:901] validating driver "docker" against <nil>
	I0916 11:09:39.980625 1488539 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:09:39.981381 1488539 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:40.041171 1488539 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:09:40.030394576 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:40.041409 1488539 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:09:40.041688 1488539 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:09:40.044619 1488539 out.go:177] * Using Docker driver with root privileges
	I0916 11:09:40.047373 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:09:40.047450 1488539 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 11:09:40.047463 1488539 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:09:40.047559 1488539 start.go:340] cluster config:
	{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:40.050654 1488539 out.go:177] * Starting "multinode-654612" primary control-plane node in "multinode-654612" cluster
	I0916 11:09:40.053501 1488539 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:09:40.056432 1488539 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:09:40.059144 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:40.059238 1488539 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:09:40.059253 1488539 cache.go:56] Caching tarball of preloaded images
	I0916 11:09:40.059264 1488539 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:09:40.059356 1488539 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:09:40.059369 1488539 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:09:40.059794 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:09:40.059873 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json: {Name:mk35ee7a773defc02a83448392f8abace23e005e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 11:09:40.079455 1488539 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:09:40.079483 1488539 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:09:40.079575 1488539 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:09:40.079629 1488539 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:09:40.079639 1488539 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:09:40.079647 1488539 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:09:40.079656 1488539 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:09:40.081374 1488539 image.go:273] response: 
	I0916 11:09:40.211671 1488539 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:09:40.211740 1488539 cache.go:194] Successfully downloaded all kic artifacts
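	// Editorial sketch: the W-line above explains why the daemon's kicbase image
	// was skipped and the cached tarball loaded instead -- its architecture does
	// not match this arm64 host. A minimal version of such a check (assumes a
	// local Docker CLI; not minikube's actual implementation):
	package main

	import (
		"fmt"
		"os/exec"
		"runtime"
		"strings"
	)

	func main() {
		img := "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644" // digest omitted
		out, err := exec.Command("docker", "image", "inspect", "-f", "{{.Architecture}}", img).Output()
		if err != nil {
			fmt.Println("image not in local daemon:", err)
			return
		}
		if arch := strings.TrimSpace(string(out)); arch != runtime.GOARCH {
			fmt.Printf("wrong architecture: image %s, host %s\n", arch, runtime.GOARCH)
		}
	}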
	I0916 11:09:40.211785 1488539 start.go:360] acquireMachinesLock for multinode-654612: {Name:mkfbf36af9c510d3c0697cdadc867dcd6648c047 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:09:40.211918 1488539 start.go:364] duration metric: took 108.116µs to acquireMachinesLock for "multinode-654612"
	I0916 11:09:40.211952 1488539 start.go:93] Provisioning new machine with config: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:09:40.212028 1488539 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:09:40.215209 1488539 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:09:40.215498 1488539 start.go:159] libmachine.API.Create for "multinode-654612" (driver="docker")
	I0916 11:09:40.215539 1488539 client.go:168] LocalClient.Create starting
	I0916 11:09:40.215658 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:09:40.215697 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:09:40.215713 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:09:40.215764 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:09:40.215782 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:09:40.215793 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:09:40.216194 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:09:40.232508 1488539 cli_runner.go:211] docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:09:40.232598 1488539 network_create.go:284] running [docker network inspect multinode-654612] to gather additional debugging logs...
	I0916 11:09:40.232618 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612
	W0916 11:09:40.247001 1488539 cli_runner.go:211] docker network inspect multinode-654612 returned with exit code 1
	I0916 11:09:40.247035 1488539 network_create.go:287] error running [docker network inspect multinode-654612]: docker network inspect multinode-654612: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network multinode-654612 not found
	I0916 11:09:40.247049 1488539 network_create.go:289] output of [docker network inspect multinode-654612]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network multinode-654612 not found
	
	** /stderr **
	I0916 11:09:40.247160 1488539 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:09:40.263391 1488539 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-a49e1846148d IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:d3:9d:ef:74} reservation:<nil>}
	I0916 11:09:40.263722 1488539 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-2e9863632116 IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:77:c8:06:b6} reservation:<nil>}
	I0916 11:09:40.264086 1488539 network.go:206] using free private subnet 192.168.67.0/24: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x40017ccb30}
	I0916 11:09:40.264113 1488539 network_create.go:124] attempt to create docker network multinode-654612 192.168.67.0/24 with gateway 192.168.67.1 and MTU of 1500 ...
	I0916 11:09:40.264184 1488539 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.67.0/24 --gateway=192.168.67.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=multinode-654612 multinode-654612
	I0916 11:09:40.339078 1488539 network_create.go:108] docker network multinode-654612 192.168.67.0/24 created
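	// Editorial sketch: the three network.go lines above probe private /24
	// subnets from 192.168.49.0 upward in steps of 9 (49 -> 58 -> 67) until one
	// is free. Simplified reproduction with the taken set hard-coded from this
	// log; minikube's real probe inspects live host interfaces:
	package main

	import "fmt"

	func main() {
		taken := map[string]bool{
			"192.168.49.0/24": true, // br-a49e1846148d above
			"192.168.58.0/24": true, // br-2e9863632116 above
		}
		for octet := 49; octet <= 254; octet += 9 {
			subnet := fmt.Sprintf("192.168.%d.0/24", octet)
			if !taken[subnet] {
				fmt.Println("using free private subnet", subnet) // 192.168.67.0/24 here
				return
			}
		}
	}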
	I0916 11:09:40.339126 1488539 kic.go:121] calculated static IP "192.168.67.2" for the "multinode-654612" container
	I0916 11:09:40.339207 1488539 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:09:40.354826 1488539 cli_runner.go:164] Run: docker volume create multinode-654612 --label name.minikube.sigs.k8s.io=multinode-654612 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:09:40.371838 1488539 oci.go:103] Successfully created a docker volume multinode-654612
	I0916 11:09:40.371968 1488539 cli_runner.go:164] Run: docker run --rm --name multinode-654612-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612 --entrypoint /usr/bin/test -v multinode-654612:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:09:40.940233 1488539 oci.go:107] Successfully prepared a docker volume multinode-654612
	I0916 11:09:40.940293 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:40.940314 1488539 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:09:40.940388 1488539 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:09:45.179143 1488539 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.238708379s)
	I0916 11:09:45.179188 1488539 kic.go:203] duration metric: took 4.238869672s to extract preloaded images to volume ...
	W0916 11:09:45.179381 1488539 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:09:45.179529 1488539 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:09:45.263136 1488539 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-654612 --name multinode-654612 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-654612 --network multinode-654612 --ip 192.168.67.2 --volume multinode-654612:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:09:45.638223 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Running}}
	I0916 11:09:45.656026 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:45.675985 1488539 cli_runner.go:164] Run: docker exec multinode-654612 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:09:45.744966 1488539 oci.go:144] the created container "multinode-654612" has a running status.
	I0916 11:09:45.744998 1488539 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa...
	I0916 11:09:46.839280 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:09:46.839333 1488539 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:09:46.857527 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:46.873242 1488539 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:09:46.873265 1488539 kic_runner.go:114] Args: [docker exec --privileged multinode-654612 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:09:46.923651 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:46.939897 1488539 machine.go:93] provisionDockerMachine start ...
	I0916 11:09:46.939998 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:46.955785 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:46.956066 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:46.956081 1488539 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:09:47.092400 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:09:47.092427 1488539 ubuntu.go:169] provisioning hostname "multinode-654612"
	I0916 11:09:47.092539 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.111222 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.111472 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.111490 1488539 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612 && echo "multinode-654612" | sudo tee /etc/hostname
	I0916 11:09:47.261836 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:09:47.261918 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.280380 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.280631 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.280651 1488539 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:09:47.417316 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
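The script echoed above is the idempotent hostname guard: it leaves /etc/hosts alone when a line already ends in the hostname, rewrites an existing 127.0.1.1 entry in place, and only appends as a last resort, so re-provisioning the same machine is safe. A sketch of how such a script can be templated per node; the shell body is copied from the log, the Go wrapper is illustrative:

package main

import "fmt"

// hostsFixupScript renders the idempotent /etc/hosts snippet seen in the log;
// %[1]s is the node hostname.
func hostsFixupScript(hostname string) string {
	return fmt.Sprintf(`
		if ! grep -xq '.*\s%[1]s' /etc/hosts; then
			if grep -xq '127.0.1.1\s.*' /etc/hosts; then
				sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 %[1]s/g' /etc/hosts;
			else
				echo '127.0.1.1 %[1]s' | sudo tee -a /etc/hosts;
			fi
		fi`, hostname)
}

func main() {
	fmt.Println(hostsFixupScript("multinode-654612"))
}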
	I0916 11:09:47.417345 1488539 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:09:47.417414 1488539 ubuntu.go:177] setting up certificates
	I0916 11:09:47.417426 1488539 provision.go:84] configureAuth start
	I0916 11:09:47.417508 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:47.434212 1488539 provision.go:143] copyHostCerts
	I0916 11:09:47.434260 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:09:47.434299 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:09:47.434309 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:09:47.434394 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:09:47.434483 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:09:47.434506 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:09:47.434514 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:09:47.434541 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:09:47.434583 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:09:47.434610 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:09:47.434618 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:09:47.434642 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:09:47.434694 1488539 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612 san=[127.0.0.1 192.168.67.2 localhost minikube multinode-654612]
	I0916 11:09:47.759510 1488539 provision.go:177] copyRemoteCerts
	I0916 11:09:47.759580 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:09:47.759640 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.776880 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:47.873818 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:09:47.873883 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:09:47.899736 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:09:47.899808 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:09:47.924490 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:09:47.924556 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:09:47.948605 1488539 provision.go:87] duration metric: took 531.159961ms to configureAuth
	I0916 11:09:47.948637 1488539 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:09:47.948948 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:09:47.949067 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.966342 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.966644 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.966673 1488539 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:09:48.213760 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:09:48.213788 1488539 machine.go:96] duration metric: took 1.273865466s to provisionDockerMachine
	I0916 11:09:48.213798 1488539 client.go:171] duration metric: took 7.99825059s to LocalClient.Create
	I0916 11:09:48.213832 1488539 start.go:167] duration metric: took 7.998335929s to libmachine.API.Create "multinode-654612"
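Provisioning finished above by dropping a one-line sysconfig file and bouncing the service: CRIO_MINIKUBE_OPTIONS marks the service CIDR (10.96.0.0/12) as an insecure registry range for CRI-O. A sketch of that step against a generic command runner; the runner signature is a stand-in, not minikube's actual ssh_runner API:

package main

import "fmt"

// configureCRIOOptions writes the sysconfig drop-in from the log and restarts
// CRI-O; run abstracts "execute this shell command on the node over SSH".
func configureCRIOOptions(run func(cmd string) error, serviceCIDR string) error {
	cmd := fmt.Sprintf(`sudo mkdir -p /etc/sysconfig && printf %%s "
CRIO_MINIKUBE_OPTIONS='--insecure-registry %s '
" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio`, serviceCIDR)
	return run(cmd)
}

func main() {
	// Dry run: print the command instead of executing it over SSH.
	_ = configureCRIOOptions(func(cmd string) error {
		fmt.Println(cmd)
		return nil
	}, "10.96.0.0/12")
}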
	I0916 11:09:48.213847 1488539 start.go:293] postStartSetup for "multinode-654612" (driver="docker")
	I0916 11:09:48.213865 1488539 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:09:48.213948 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:09:48.214017 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.231965 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.329975 1488539 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:09:48.333135 1488539 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:09:48.333154 1488539 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:09:48.333164 1488539 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:09:48.333170 1488539 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:09:48.333175 1488539 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:09:48.333178 1488539 command_runner.go:130] > ID=ubuntu
	I0916 11:09:48.333181 1488539 command_runner.go:130] > ID_LIKE=debian
	I0916 11:09:48.333188 1488539 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:09:48.333192 1488539 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:09:48.333199 1488539 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:09:48.333206 1488539 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:09:48.333211 1488539 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:09:48.333251 1488539 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:09:48.333274 1488539 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:09:48.333284 1488539 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:09:48.333292 1488539 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:09:48.333303 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:09:48.333364 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:09:48.333453 1488539 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:09:48.333460 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:09:48.333570 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:09:48.342289 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:09:48.366208 1488539 start.go:296] duration metric: took 152.341296ms for postStartSetup
	I0916 11:09:48.366571 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:48.382958 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:09:48.383247 1488539 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:09:48.383290 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.399589 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.497545 1488539 command_runner.go:130] > 12%
	I0916 11:09:48.498084 1488539 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:09:48.503022 1488539 command_runner.go:130] > 172G
	I0916 11:09:48.503051 1488539 start.go:128] duration metric: took 8.291011625s to createHost
	I0916 11:09:48.503062 1488539 start.go:83] releasing machines lock for "multinode-654612", held for 8.291131384s
	I0916 11:09:48.503141 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:48.519372 1488539 ssh_runner.go:195] Run: cat /version.json
	I0916 11:09:48.519433 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.519689 1488539 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:09:48.519759 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.539532 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.556842 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.757240 1488539 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:09:48.757311 1488539 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:09:48.757476 1488539 ssh_runner.go:195] Run: systemctl --version
	I0916 11:09:48.761376 1488539 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:09:48.761419 1488539 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:09:48.761712 1488539 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:09:48.903904 1488539 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:09:48.908107 1488539 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:09:48.908135 1488539 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:09:48.908142 1488539 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 11:09:48.908149 1488539 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:48.908155 1488539 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:09:48.908174 1488539 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:09:48.908183 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:09:48.908188 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:09:48.908668 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:09:48.929931 1488539 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:09:48.930058 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:09:48.963124 1488539 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:09:48.963191 1488539 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
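The find/mv passes above disable the stock CNI configs (loopback first, then the podman and crio bridge configs) by renaming them to *.mk_disabled, leaving the directory clear for the cluster's own CNI to install its config later. A pure-Go equivalent of the bridge/podman pass, for illustration only:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// disableBridgeCNI renames bridge/podman CNI configs so the runtime ignores
// them, matching what the logged find -exec "sudo mv {} {}.mk_disabled" does.
func disableBridgeCNI(dir string) error {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return err
	}
	for _, e := range entries {
		name := e.Name()
		if strings.HasSuffix(name, ".mk_disabled") {
			continue // already disabled on a previous run
		}
		if strings.Contains(name, "bridge") || strings.Contains(name, "podman") {
			src := filepath.Join(dir, name)
			if err := os.Rename(src, src+".mk_disabled"); err != nil {
				return err
			}
			fmt.Println("disabled", src)
		}
	}
	return nil
}

func main() {
	if err := disableBridgeCNI("/etc/cni/net.d"); err != nil {
		fmt.Println(err)
	}
}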
	I0916 11:09:48.963203 1488539 start.go:495] detecting cgroup driver to use...
	I0916 11:09:48.963237 1488539 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:09:48.963304 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:09:48.979439 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:09:48.991317 1488539 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:09:48.991426 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:09:49.007780 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:09:49.025143 1488539 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:09:49.115879 1488539 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:09:49.212140 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:09:49.212222 1488539 docker.go:233] disabling docker service ...
	I0916 11:09:49.212302 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:09:49.234143 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:09:49.246639 1488539 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:09:49.258136 1488539 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:09:49.341778 1488539 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:09:49.353681 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:09:49.439730 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:09:49.451566 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:09:49.467301 1488539 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
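crictl needs exactly the one-line /etc/crictl.yaml echoed back above to find CRI-O instead of probing for runtime sockets. A sketch that writes the same file directly (minikube does it remotely through the tee pipeline shown):

package main

import "os"

func main() {
	// Point crictl at the CRI-O socket, matching the file content in the log.
	const crictlYAML = "runtime-endpoint: unix:///var/run/crio/crio.sock\n"
	if err := os.WriteFile("/etc/crictl.yaml", []byte(crictlYAML), 0o644); err != nil {
		panic(err)
	}
}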
	I0916 11:09:49.468467 1488539 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:09:49.468529 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.479433 1488539 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:09:49.479520 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.490641 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.501363 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.512267 1488539 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:09:49.522639 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.533656 1488539 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.550687 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
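Taken together, the sed runs above leave /etc/crio/crio.conf.d/02-crio.conf pinning the pause image, selecting cgroupfs with conmon in the pod cgroup, and opening unprivileged ports from 0. A sketch of that assumed end state written directly instead of patched with sed; the [crio.image]/[crio.runtime] sectioning follows CRI-O's standard config layout and is an assumption here, while the values come from the log:

package main

import "os"

// crioDropIn is the assumed net effect of the sed pipeline in the log; the
// section names are CRI-O's documented layout, not something the log shows.
const crioDropIn = `[crio.image]
pause_image = "registry.k8s.io/pause:3.10"

[crio.runtime]
cgroup_manager = "cgroupfs"
conmon_cgroup = "pod"
default_sysctls = [
  "net.ipv4.ip_unprivileged_port_start=0",
]
`

func main() {
	if err := os.WriteFile("/etc/crio/crio.conf.d/02-crio.conf", []byte(crioDropIn), 0o644); err != nil {
		panic(err)
	}
}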
	I0916 11:09:49.560865 1488539 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:09:49.569553 1488539 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:09:49.570749 1488539 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:09:49.579548 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:49.668236 1488539 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:09:49.766038 1488539 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:09:49.766111 1488539 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:09:49.769662 1488539 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:09:49.769684 1488539 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:09:49.769692 1488539 command_runner.go:130] > Device: 43h/67d	Inode: 186         Links: 1
	I0916 11:09:49.769700 1488539 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:49.769705 1488539 command_runner.go:130] > Access: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769710 1488539 command_runner.go:130] > Modify: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769715 1488539 command_runner.go:130] > Change: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769718 1488539 command_runner.go:130] >  Birth: -
	I0916 11:09:49.769734 1488539 start.go:563] Will wait 60s for crictl version
	I0916 11:09:49.769793 1488539 ssh_runner.go:195] Run: which crictl
	I0916 11:09:49.772913 1488539 command_runner.go:130] > /usr/bin/crictl
	I0916 11:09:49.773248 1488539 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:09:49.810657 1488539 command_runner.go:130] > Version:  0.1.0
	I0916 11:09:49.810680 1488539 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:09:49.810689 1488539 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:09:49.810851 1488539 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:09:49.813451 1488539 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:09:49.813535 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:09:49.850914 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:09:49.850939 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:09:49.850948 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:09:49.850952 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:09:49.850959 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:09:49.850963 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:09:49.850967 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:09:49.850971 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:09:49.850976 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:09:49.850984 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:09:49.850991 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:09:49.850995 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:09:49.853424 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:09:49.893332 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:09:49.893356 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:09:49.893364 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:09:49.893369 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:09:49.893375 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:09:49.893380 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:09:49.893384 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:09:49.893388 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:09:49.893399 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:09:49.893412 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:09:49.893420 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:09:49.893425 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:09:49.898428 1488539 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:09:49.901050 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:09:49.915805 1488539 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:09:49.919461 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
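The grep/rewrite pair above pins host.minikube.internal to the docker network gateway (192.168.67.1) so workloads on the node can reach the host. A pure-Go sketch of the same filter-and-append rewrite; the logged bash goes through a temp file plus sudo cp, which the sketch skips:

package main

import (
	"fmt"
	"os"
	"strings"
)

// ensureHostEntry drops any stale line for host and appends the current
// ip<TAB>host mapping, mirroring the grep -v / echo pipeline in the log.
func ensureHostEntry(path, ip, host string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var kept []string
	for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
		if strings.HasSuffix(line, "\t"+host) {
			continue // stale mapping, drop it
		}
		kept = append(kept, line)
	}
	kept = append(kept, fmt.Sprintf("%s\t%s", ip, host))
	return os.WriteFile(path, []byte(strings.Join(kept, "\n")+"\n"), 0o644)
}

func main() {
	if err := ensureHostEntry("/etc/hosts", "192.168.67.1", "host.minikube.internal"); err != nil {
		fmt.Println(err)
	}
}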
	I0916 11:09:49.930712 1488539 kubeadm.go:883] updating cluster {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:09:49.930838 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:49.930906 1488539 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:09:50.014687 1488539 command_runner.go:130] > {
	I0916 11:09:50.014715 1488539 command_runner.go:130] >   "images": [
	I0916 11:09:50.014720 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014730 1488539 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:09:50.014736 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014742 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:09:50.014753 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014758 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014767 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:09:50.014775 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:09:50.014779 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014784 1488539 command_runner.go:130] >       "size": "90295858",
	I0916 11:09:50.014787 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014791 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.014796 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014804 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014807 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014811 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014817 1488539 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:09:50.014823 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014829 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:09:50.014832 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014837 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014846 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:09:50.014859 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:09:50.014862 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014868 1488539 command_runner.go:130] >       "size": "29037500",
	I0916 11:09:50.014875 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014879 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.014883 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014887 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014890 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014893 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014903 1488539 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:09:50.014909 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014914 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:09:50.014917 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014921 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014932 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:09:50.014945 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:09:50.014949 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014959 1488539 command_runner.go:130] >       "size": "61647114",
	I0916 11:09:50.014963 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014967 1488539 command_runner.go:130] >       "username": "nonroot",
	I0916 11:09:50.014971 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014975 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014978 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014981 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014987 1488539 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:09:50.014995 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015003 1488539 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:09:50.015008 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015013 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015024 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:09:50.015037 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:09:50.015044 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015049 1488539 command_runner.go:130] >       "size": "139912446",
	I0916 11:09:50.015052 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015056 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015065 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015069 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015073 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015077 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015084 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015087 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015094 1488539 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:09:50.015098 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015107 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:09:50.015111 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015120 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015128 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:09:50.015148 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:09:50.015155 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015159 1488539 command_runner.go:130] >       "size": "92632544",
	I0916 11:09:50.015162 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015166 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015169 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015173 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015177 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015181 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015188 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015191 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015203 1488539 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:09:50.015206 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015212 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:09:50.015216 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015220 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015229 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:09:50.015240 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:09:50.015244 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015248 1488539 command_runner.go:130] >       "size": "86930758",
	I0916 11:09:50.015252 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015259 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015262 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015266 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015270 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015274 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015277 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015281 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015288 1488539 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:09:50.015296 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015301 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:09:50.015305 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015309 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015321 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:09:50.015330 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:09:50.015337 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015341 1488539 command_runner.go:130] >       "size": "95951255",
	I0916 11:09:50.015345 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.015349 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015353 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015356 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015360 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015363 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015370 1488539 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:09:50.015373 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015378 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:09:50.015381 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015385 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015410 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:09:50.015422 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:09:50.015425 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015429 1488539 command_runner.go:130] >       "size": "67007814",
	I0916 11:09:50.015432 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015436 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015439 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015443 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015446 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015450 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015453 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015456 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015464 1488539 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:09:50.015471 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015475 1488539 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:09:50.015479 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015483 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015491 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:09:50.015502 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:09:50.015511 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015518 1488539 command_runner.go:130] >       "size": "519877",
	I0916 11:09:50.015522 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015526 1488539 command_runner.go:130] >         "value": "65535"
	I0916 11:09:50.015529 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015533 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015540 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015544 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015547 1488539 command_runner.go:130] >     }
	I0916 11:09:50.015550 1488539 command_runner.go:130] >   ]
	I0916 11:09:50.015553 1488539 command_runner.go:130] > }
	I0916 11:09:50.017697 1488539 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:09:50.017728 1488539 crio.go:433] Images already preloaded, skipping extraction
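The "all images are preloaded" verdict above can be reproduced by decoding the `sudo crictl images --output json` output and checking that every expected tag for the Kubernetes version is present. A sketch under that assumption; the struct fields mirror the JSON dumped in the log, and the expected list is abbreviated to two entries for illustration:

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// imageList mirrors the shape of `crictl images --output json`.
type imageList struct {
	Images []struct {
		RepoTags []string `json:"repoTags"`
	} `json:"images"`
}

// allPreloaded reports whether every expected image tag is already present
// in the runtime's image store.
func allPreloaded(expected []string) (bool, error) {
	out, err := exec.Command("sudo", "crictl", "images", "--output", "json").Output()
	if err != nil {
		return false, err
	}
	var list imageList
	if err := json.Unmarshal(out, &list); err != nil {
		return false, err
	}
	have := map[string]bool{}
	for _, img := range list.Images {
		for _, tag := range img.RepoTags {
			have[tag] = true
		}
	}
	for _, want := range expected {
		if !have[want] {
			return false, nil // at least one image missing: extraction needed
		}
	}
	return true, nil
}

func main() {
	ok, err := allPreloaded([]string{
		"registry.k8s.io/kube-apiserver:v1.31.1",
		"registry.k8s.io/etcd:3.5.15-0",
	})
	fmt.Println(ok, err)
}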
	I0916 11:09:50.017803 1488539 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:09:50.057885 1488539 command_runner.go:130] > {
	I0916 11:09:50.057911 1488539 command_runner.go:130] >   "images": [
	I0916 11:09:50.057917 1488539 command_runner.go:130] >     {
	I0916 11:09:50.057927 1488539 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:09:50.057932 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.057939 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:09:50.057943 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.057948 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.057957 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:09:50.057969 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:09:50.057973 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.057977 1488539 command_runner.go:130] >       "size": "90295858",
	I0916 11:09:50.057985 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.057990 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058002 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058009 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058013 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058019 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058026 1488539 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:09:50.058033 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058039 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:09:50.058046 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058050 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058059 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:09:50.058071 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:09:50.058077 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058083 1488539 command_runner.go:130] >       "size": "29037500",
	I0916 11:09:50.058090 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.058099 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058106 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058110 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058116 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058120 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058127 1488539 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:09:50.058136 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058142 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:09:50.058169 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058178 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058188 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:09:50.058199 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:09:50.058206 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058210 1488539 command_runner.go:130] >       "size": "61647114",
	I0916 11:09:50.058218 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.058222 1488539 command_runner.go:130] >       "username": "nonroot",
	I0916 11:09:50.058229 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058233 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058239 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058243 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058254 1488539 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:09:50.058262 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058267 1488539 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:09:50.058273 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058277 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058289 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:09:50.058302 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:09:50.058309 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058313 1488539 command_runner.go:130] >       "size": "139912446",
	I0916 11:09:50.058320 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.058324 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.058330 1488539 command_runner.go:130] >       },
	I0916 11:09:50.058334 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058344 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058348 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058354 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058357 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058364 1488539 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:09:50.058371 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058377 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:09:50.058383 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058387 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058399 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:09:50.058411 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:09:50.058417 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058421 1488539 command_runner.go:130] >       "size": "92632544",
	I0916 11:09:50.058428 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.058432 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.058439 1488539 command_runner.go:130] >       },
	I0916 11:09:50.058443 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058451 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058455 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058462 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058466 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058473 1488539 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:09:50.058481 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058487 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:09:50.058493 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058497 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058509 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:09:50.058521 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:09:50.058527 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058532 1488539 command_runner.go:130] >       "size": "86930758",
	I0916 11:09:50.058539 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.058543 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.058550 1488539 command_runner.go:130] >       },
	I0916 11:09:50.058555 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058563 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058568 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058575 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058579 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058590 1488539 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:09:50.058598 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058604 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:09:50.058610 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058615 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058626 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:09:50.058637 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:09:50.058643 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058647 1488539 command_runner.go:130] >       "size": "95951255",
	I0916 11:09:50.058655 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.058659 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058669 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058673 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058680 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058683 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058694 1488539 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:09:50.058702 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058707 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:09:50.058714 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058718 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058735 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:09:50.058747 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:09:50.058753 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058758 1488539 command_runner.go:130] >       "size": "67007814",
	I0916 11:09:50.058765 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.058769 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.058776 1488539 command_runner.go:130] >       },
	I0916 11:09:50.058780 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058789 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058794 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058801 1488539 command_runner.go:130] >     },
	I0916 11:09:50.058805 1488539 command_runner.go:130] >     {
	I0916 11:09:50.058811 1488539 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:09:50.058818 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.058824 1488539 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:09:50.058831 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058835 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.058858 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:09:50.058869 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:09:50.058876 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.058880 1488539 command_runner.go:130] >       "size": "519877",
	I0916 11:09:50.058887 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.058892 1488539 command_runner.go:130] >         "value": "65535"
	I0916 11:09:50.058898 1488539 command_runner.go:130] >       },
	I0916 11:09:50.058903 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.058911 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.058915 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.058921 1488539 command_runner.go:130] >     }
	I0916 11:09:50.058924 1488539 command_runner.go:130] >   ]
	I0916 11:09:50.058927 1488539 command_runner.go:130] > }
	I0916 11:09:50.061494 1488539 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:09:50.061521 1488539 cache_images.go:84] Images are preloaded, skipping loading
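
	The JSON block above is CRI-O's image-list response, which the provisioner compares against the expected preload set before concluding that loading can be skipped. A minimal sketch of decoding that shape in Go; the standalone struct below is an assumption for illustration, not minikube's actual type:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // imageList mirrors only the fields visible in the log above.
    type imageList struct {
        Images []struct {
            ID          string   `json:"id"`
            RepoTags    []string `json:"repoTags"`
            RepoDigests []string `json:"repoDigests"`
            Size        string   `json:"size"`
            Pinned      bool     `json:"pinned"`
        } `json:"images"`
    }

    func main() {
        raw := []byte(`{"images":[{"id":"7f8aa378","repoTags":["registry.k8s.io/kube-scheduler:v1.31.1"],"size":"67007814","pinned":false}]}`)
        var list imageList
        if err := json.Unmarshal(raw, &list); err != nil {
            panic(err)
        }
        for _, img := range list.Images {
            fmt.Println(img.RepoTags, img.Size, img.Pinned)
        }
    }
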
	I0916 11:09:50.061531 1488539 kubeadm.go:934] updating node { 192.168.67.2 8443 v1.31.1 crio true true} ...
	I0916 11:09:50.061679 1488539 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
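
	The [Unit]/[Service] fragment above is what lands in the systemd drop-in written a few steps later (/etc/systemd/system/kubelet.service.d/10-kubeadm.conf). A hedged sketch of producing that text in Go; the values are copied from the log, but the generator itself is hypothetical, not minikube's own:

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        // Values copied from the log above; this generator is a hypothetical
        // stand-in for minikube's template, shown only to make the drop-in
        // shape concrete.
        kubelet := "/var/lib/minikube/binaries/v1.31.1/kubelet"
        node, ip := "multinode-654612", "192.168.67.2"
        fmt.Fprintf(os.Stdout,
            "[Unit]\nWants=crio.service\n\n[Service]\nExecStart=\nExecStart=%s --hostname-override=%s --node-ip=%s\n\n[Install]\n",
            kubelet, node, ip)
    }
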
	I0916 11:09:50.061817 1488539 ssh_runner.go:195] Run: crio config
	I0916 11:09:50.109237 1488539 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 11:09:50.109264 1488539 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 11:09:50.109271 1488539 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 11:09:50.109275 1488539 command_runner.go:130] > #
	I0916 11:09:50.109283 1488539 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 11:09:50.109290 1488539 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 11:09:50.109296 1488539 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 11:09:50.109316 1488539 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 11:09:50.109320 1488539 command_runner.go:130] > # reload'.
	I0916 11:09:50.109327 1488539 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 11:09:50.109333 1488539 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 11:09:50.109340 1488539 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 11:09:50.109346 1488539 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 11:09:50.109349 1488539 command_runner.go:130] > [crio]
	I0916 11:09:50.109360 1488539 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 11:09:50.109365 1488539 command_runner.go:130] > # container images, in this directory.
	I0916 11:09:50.109375 1488539 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 11:09:50.109382 1488539 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 11:09:50.109387 1488539 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 11:09:50.109393 1488539 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 11:09:50.109400 1488539 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 11:09:50.109404 1488539 command_runner.go:130] > # storage_driver = "vfs"
	I0916 11:09:50.109409 1488539 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 11:09:50.109415 1488539 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 11:09:50.109418 1488539 command_runner.go:130] > # storage_option = [
	I0916 11:09:50.109421 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.109427 1488539 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 11:09:50.109434 1488539 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 11:09:50.109438 1488539 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 11:09:50.109444 1488539 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 11:09:50.109450 1488539 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 11:09:50.109454 1488539 command_runner.go:130] > # always happen on a node reboot
	I0916 11:09:50.109458 1488539 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 11:09:50.109464 1488539 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 11:09:50.109470 1488539 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 11:09:50.109479 1488539 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 11:09:50.109484 1488539 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 11:09:50.109494 1488539 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 11:09:50.109502 1488539 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 11:09:50.109505 1488539 command_runner.go:130] > # internal_wipe = true
	I0916 11:09:50.109511 1488539 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 11:09:50.109517 1488539 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 11:09:50.109522 1488539 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 11:09:50.109527 1488539 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 11:09:50.109533 1488539 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 11:09:50.109537 1488539 command_runner.go:130] > [crio.api]
	I0916 11:09:50.109542 1488539 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 11:09:50.109550 1488539 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 11:09:50.109555 1488539 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 11:09:50.109559 1488539 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 11:09:50.109566 1488539 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 11:09:50.109571 1488539 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 11:09:50.109600 1488539 command_runner.go:130] > # stream_port = "0"
	I0916 11:09:50.109609 1488539 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 11:09:50.109700 1488539 command_runner.go:130] > # stream_enable_tls = false
	I0916 11:09:50.109710 1488539 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 11:09:50.109961 1488539 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 11:09:50.109973 1488539 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 11:09:50.109981 1488539 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 11:09:50.109990 1488539 command_runner.go:130] > # minutes.
	I0916 11:09:50.110223 1488539 command_runner.go:130] > # stream_tls_cert = ""
	I0916 11:09:50.110235 1488539 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 11:09:50.110247 1488539 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 11:09:50.110520 1488539 command_runner.go:130] > # stream_tls_key = ""
	I0916 11:09:50.110531 1488539 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 11:09:50.110538 1488539 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 11:09:50.110549 1488539 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 11:09:50.110793 1488539 command_runner.go:130] > # stream_tls_ca = ""
	I0916 11:09:50.110838 1488539 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:09:50.111125 1488539 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 11:09:50.111137 1488539 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:09:50.111518 1488539 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
	I0916 11:09:50.111553 1488539 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 11:09:50.111560 1488539 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 11:09:50.111565 1488539 command_runner.go:130] > [crio.runtime]
	I0916 11:09:50.111571 1488539 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 11:09:50.111577 1488539 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 11:09:50.111581 1488539 command_runner.go:130] > # "nofile=1024:2048"
	I0916 11:09:50.111587 1488539 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 11:09:50.111710 1488539 command_runner.go:130] > # default_ulimits = [
	I0916 11:09:50.111854 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.111870 1488539 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 11:09:50.112197 1488539 command_runner.go:130] > # no_pivot = false
	I0916 11:09:50.112219 1488539 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 11:09:50.112228 1488539 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 11:09:50.112524 1488539 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 11:09:50.112535 1488539 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 11:09:50.112540 1488539 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 11:09:50.112548 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:09:50.112840 1488539 command_runner.go:130] > # conmon = ""
	I0916 11:09:50.112850 1488539 command_runner.go:130] > # Cgroup setting for conmon
	I0916 11:09:50.112865 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 11:09:50.113037 1488539 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 11:09:50.113048 1488539 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 11:09:50.113054 1488539 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 11:09:50.113066 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:09:50.113216 1488539 command_runner.go:130] > # conmon_env = [
	I0916 11:09:50.113416 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.113480 1488539 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 11:09:50.113567 1488539 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 11:09:50.113594 1488539 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 11:09:50.113613 1488539 command_runner.go:130] > # default_env = [
	I0916 11:09:50.113753 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.113809 1488539 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 11:09:50.114106 1488539 command_runner.go:130] > # selinux = false
	I0916 11:09:50.114174 1488539 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 11:09:50.114251 1488539 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 11:09:50.114279 1488539 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 11:09:50.114406 1488539 command_runner.go:130] > # seccomp_profile = ""
	I0916 11:09:50.114457 1488539 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 11:09:50.114481 1488539 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 11:09:50.114501 1488539 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 11:09:50.114576 1488539 command_runner.go:130] > # which might increase security.
	I0916 11:09:50.114750 1488539 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 11:09:50.114806 1488539 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 11:09:50.114829 1488539 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 11:09:50.114891 1488539 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 11:09:50.114932 1488539 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 11:09:50.114969 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.115048 1488539 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 11:09:50.115101 1488539 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 11:09:50.115122 1488539 command_runner.go:130] > # the cgroup blockio controller.
	I0916 11:09:50.115351 1488539 command_runner.go:130] > # blockio_config_file = ""
	I0916 11:09:50.115403 1488539 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 11:09:50.115485 1488539 command_runner.go:130] > # irqbalance daemon.
	I0916 11:09:50.115668 1488539 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 11:09:50.115711 1488539 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 11:09:50.115779 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.115994 1488539 command_runner.go:130] > # rdt_config_file = ""
	I0916 11:09:50.116061 1488539 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 11:09:50.116167 1488539 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 11:09:50.116228 1488539 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 11:09:50.116394 1488539 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 11:09:50.116455 1488539 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 11:09:50.116535 1488539 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 11:09:50.116574 1488539 command_runner.go:130] > # will be added.
	I0916 11:09:50.116593 1488539 command_runner.go:130] > # default_capabilities = [
	I0916 11:09:50.116841 1488539 command_runner.go:130] > # 	"CHOWN",
	I0916 11:09:50.117031 1488539 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 11:09:50.117206 1488539 command_runner.go:130] > # 	"FSETID",
	I0916 11:09:50.117389 1488539 command_runner.go:130] > # 	"FOWNER",
	I0916 11:09:50.117571 1488539 command_runner.go:130] > # 	"SETGID",
	I0916 11:09:50.117760 1488539 command_runner.go:130] > # 	"SETUID",
	I0916 11:09:50.117994 1488539 command_runner.go:130] > # 	"SETPCAP",
	I0916 11:09:50.118143 1488539 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 11:09:50.118302 1488539 command_runner.go:130] > # 	"KILL",
	I0916 11:09:50.118466 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.118551 1488539 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 11:09:50.118643 1488539 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 11:09:50.118836 1488539 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 11:09:50.118894 1488539 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 11:09:50.118926 1488539 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:09:50.119032 1488539 command_runner.go:130] > default_sysctls = [
	I0916 11:09:50.119085 1488539 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 11:09:50.119158 1488539 command_runner.go:130] > ]
	I0916 11:09:50.119253 1488539 command_runner.go:130] > # List of devices on the host that a
	I0916 11:09:50.119376 1488539 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 11:09:50.119467 1488539 command_runner.go:130] > # allowed_devices = [
	I0916 11:09:50.119510 1488539 command_runner.go:130] > # 	"/dev/fuse",
	I0916 11:09:50.119708 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.119758 1488539 command_runner.go:130] > # List of additional devices, specified as
	I0916 11:09:50.119877 1488539 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 11:09:50.119934 1488539 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 11:09:50.119992 1488539 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:09:50.120066 1488539 command_runner.go:130] > # additional_devices = [
	I0916 11:09:50.120115 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.120152 1488539 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 11:09:50.120219 1488539 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 11:09:50.120406 1488539 command_runner.go:130] > # 	"/etc/cdi",
	I0916 11:09:50.120561 1488539 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 11:09:50.120728 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.120818 1488539 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 11:09:50.120898 1488539 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking host's uid/gid.
	I0916 11:09:50.120960 1488539 command_runner.go:130] > # Defaults to false.
	I0916 11:09:50.121087 1488539 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 11:09:50.121121 1488539 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 11:09:50.121209 1488539 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 11:09:50.121281 1488539 command_runner.go:130] > # hooks_dir = [
	I0916 11:09:50.121503 1488539 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 11:09:50.121679 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.121779 1488539 command_runner.go:130] > # Path to the file specifying the defaults mounts for each container. The
	I0916 11:09:50.121845 1488539 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 11:09:50.121886 1488539 command_runner.go:130] > # its default mounts from the following two files:
	I0916 11:09:50.121928 1488539 command_runner.go:130] > #
	I0916 11:09:50.121948 1488539 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 11:09:50.122021 1488539 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 11:09:50.122073 1488539 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 11:09:50.122091 1488539 command_runner.go:130] > #
	I0916 11:09:50.122127 1488539 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 11:09:50.122199 1488539 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 11:09:50.122254 1488539 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 11:09:50.122284 1488539 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 11:09:50.122299 1488539 command_runner.go:130] > #
	I0916 11:09:50.122377 1488539 command_runner.go:130] > # default_mounts_file = ""
	I0916 11:09:50.122455 1488539 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 11:09:50.122520 1488539 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 11:09:50.122585 1488539 command_runner.go:130] > # pids_limit = 0
	I0916 11:09:50.122617 1488539 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 11:09:50.122694 1488539 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 11:09:50.122740 1488539 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 11:09:50.122792 1488539 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 11:09:50.122866 1488539 command_runner.go:130] > # log_size_max = -1
	I0916 11:09:50.122890 1488539 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 11:09:50.122951 1488539 command_runner.go:130] > # log_to_journald = false
	I0916 11:09:50.122971 1488539 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 11:09:50.123272 1488539 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 11:09:50.123312 1488539 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 11:09:50.123612 1488539 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 11:09:50.123651 1488539 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 11:09:50.123790 1488539 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 11:09:50.123836 1488539 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 11:09:50.124117 1488539 command_runner.go:130] > # read_only = false
	I0916 11:09:50.124171 1488539 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 11:09:50.124251 1488539 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 11:09:50.124297 1488539 command_runner.go:130] > # live configuration reload.
	I0916 11:09:50.124438 1488539 command_runner.go:130] > # log_level = "info"
	I0916 11:09:50.124471 1488539 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 11:09:50.124532 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.124721 1488539 command_runner.go:130] > # log_filter = ""
	I0916 11:09:50.124788 1488539 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 11:09:50.124809 1488539 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 11:09:50.124880 1488539 command_runner.go:130] > # separated by comma.
	I0916 11:09:50.124931 1488539 command_runner.go:130] > # uid_mappings = ""
	I0916 11:09:50.124962 1488539 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 11:09:50.124998 1488539 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 11:09:50.125017 1488539 command_runner.go:130] > # separated by comma.
	I0916 11:09:50.125201 1488539 command_runner.go:130] > # gid_mappings = ""
	I0916 11:09:50.125269 1488539 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 11:09:50.125307 1488539 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:09:50.125392 1488539 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:09:50.125587 1488539 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 11:09:50.125600 1488539 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 11:09:50.125607 1488539 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:09:50.125615 1488539 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:09:50.125940 1488539 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 11:09:50.125952 1488539 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 11:09:50.125958 1488539 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 11:09:50.125977 1488539 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 11:09:50.126266 1488539 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 11:09:50.126277 1488539 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 11:09:50.126314 1488539 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 11:09:50.126322 1488539 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 11:09:50.126327 1488539 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 11:09:50.126661 1488539 command_runner.go:130] > # drop_infra_ctr = true
	I0916 11:09:50.126673 1488539 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 11:09:50.126680 1488539 command_runner.go:130] > # You can use linux CPU list format to specify desired CPUs.
	I0916 11:09:50.126687 1488539 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 11:09:50.126937 1488539 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 11:09:50.126947 1488539 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 11:09:50.126970 1488539 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 11:09:50.127279 1488539 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 11:09:50.127290 1488539 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 11:09:50.127545 1488539 command_runner.go:130] > # pinns_path = ""
	I0916 11:09:50.127556 1488539 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 11:09:50.127589 1488539 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 11:09:50.127597 1488539 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 11:09:50.128040 1488539 command_runner.go:130] > # default_runtime = "runc"
	I0916 11:09:50.128057 1488539 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 11:09:50.128066 1488539 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior being created as a directory).
	I0916 11:09:50.128088 1488539 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 11:09:50.128118 1488539 command_runner.go:130] > # creation as a file is not desired either.
	I0916 11:09:50.128129 1488539 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 11:09:50.128133 1488539 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 11:09:50.128138 1488539 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 11:09:50.128141 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.128147 1488539 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 11:09:50.128154 1488539 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 11:09:50.128161 1488539 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 11:09:50.128167 1488539 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 11:09:50.128170 1488539 command_runner.go:130] > #
	I0916 11:09:50.128175 1488539 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 11:09:50.128194 1488539 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 11:09:50.128200 1488539 command_runner.go:130] > #  runtime_type = "oci"
	I0916 11:09:50.128205 1488539 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 11:09:50.128210 1488539 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 11:09:50.128214 1488539 command_runner.go:130] > #  allowed_annotations = []
	I0916 11:09:50.128217 1488539 command_runner.go:130] > # Where:
	I0916 11:09:50.128223 1488539 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 11:09:50.128230 1488539 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 11:09:50.128244 1488539 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 11:09:50.128250 1488539 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 11:09:50.128254 1488539 command_runner.go:130] > #   in $PATH.
	I0916 11:09:50.128273 1488539 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 11:09:50.128279 1488539 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 11:09:50.128286 1488539 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 11:09:50.128298 1488539 command_runner.go:130] > #   state.
	I0916 11:09:50.128305 1488539 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 11:09:50.128311 1488539 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 11:09:50.128317 1488539 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 11:09:50.128330 1488539 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 11:09:50.128350 1488539 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 11:09:50.128365 1488539 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 11:09:50.128370 1488539 command_runner.go:130] > #   The currently recognized values are:
	I0916 11:09:50.128380 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 11:09:50.128403 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 11:09:50.128411 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 11:09:50.128417 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 11:09:50.128425 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 11:09:50.128431 1488539 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 11:09:50.128437 1488539 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 11:09:50.128445 1488539 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 11:09:50.128450 1488539 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 11:09:50.128454 1488539 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 11:09:50.128458 1488539 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 11:09:50.128462 1488539 command_runner.go:130] > runtime_type = "oci"
	I0916 11:09:50.128479 1488539 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 11:09:50.128484 1488539 command_runner.go:130] > runtime_config_path = ""
	I0916 11:09:50.128525 1488539 command_runner.go:130] > monitor_path = ""
	I0916 11:09:50.128529 1488539 command_runner.go:130] > monitor_cgroup = ""
	I0916 11:09:50.128536 1488539 command_runner.go:130] > monitor_exec_cgroup = ""
	I0916 11:09:50.128584 1488539 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 11:09:50.128590 1488539 command_runner.go:130] > # running containers
	I0916 11:09:50.128595 1488539 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 11:09:50.128602 1488539 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 11:09:50.128609 1488539 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 11:09:50.128614 1488539 command_runner.go:130] > # surface and mitigating the consequences of containers breakout.
	I0916 11:09:50.128633 1488539 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 11:09:50.128639 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 11:09:50.128644 1488539 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 11:09:50.128648 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 11:09:50.128652 1488539 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 11:09:50.128656 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 11:09:50.128666 1488539 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 11:09:50.128692 1488539 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 11:09:50.128700 1488539 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 11:09:50.128708 1488539 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and set of resources it supports mutating.
	I0916 11:09:50.128716 1488539 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 11:09:50.128722 1488539 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 11:09:50.128731 1488539 command_runner.go:130] > # For a container to opt into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 11:09:50.128742 1488539 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 11:09:50.128748 1488539 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 11:09:50.128755 1488539 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 11:09:50.128758 1488539 command_runner.go:130] > # Example:
	I0916 11:09:50.128763 1488539 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 11:09:50.128770 1488539 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 11:09:50.128774 1488539 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 11:09:50.128794 1488539 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 11:09:50.128798 1488539 command_runner.go:130] > # cpuset = 0
	I0916 11:09:50.128802 1488539 command_runner.go:130] > # cpushares = "0-1"
	I0916 11:09:50.128805 1488539 command_runner.go:130] > # Where:
	I0916 11:09:50.128809 1488539 command_runner.go:130] > # The workload name is workload-type.
	I0916 11:09:50.128816 1488539 command_runner.go:130] > # To specify, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 11:09:50.128821 1488539 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 11:09:50.128827 1488539 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 11:09:50.128835 1488539 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 11:09:50.128849 1488539 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 11:09:50.128852 1488539 command_runner.go:130] > # 
	I0916 11:09:50.128871 1488539 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 11:09:50.128875 1488539 command_runner.go:130] > #
	I0916 11:09:50.128882 1488539 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 11:09:50.128888 1488539 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 11:09:50.128894 1488539 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 11:09:50.128901 1488539 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 11:09:50.128907 1488539 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 11:09:50.128910 1488539 command_runner.go:130] > [crio.image]
	I0916 11:09:50.128916 1488539 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 11:09:50.128923 1488539 command_runner.go:130] > # default_transport = "docker://"
	I0916 11:09:50.128929 1488539 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 11:09:50.128947 1488539 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:09:50.128953 1488539 command_runner.go:130] > # global_auth_file = ""
	I0916 11:09:50.128958 1488539 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 11:09:50.128975 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.128981 1488539 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 11:09:50.128987 1488539 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 11:09:50.128994 1488539 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:09:50.128999 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.129003 1488539 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 11:09:50.129008 1488539 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 11:09:50.129014 1488539 command_runner.go:130] > # When explicitly set to "", it will fallback to the entrypoint and command
	I0916 11:09:50.129020 1488539 command_runner.go:130] > # specified in the pause image. When commented out, it will fallback to the
	I0916 11:09:50.129026 1488539 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 11:09:50.129075 1488539 command_runner.go:130] > # pause_command = "/pause"
	I0916 11:09:50.129083 1488539 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 11:09:50.129090 1488539 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 11:09:50.129096 1488539 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 11:09:50.129102 1488539 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 11:09:50.129107 1488539 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 11:09:50.129110 1488539 command_runner.go:130] > # signature_policy = ""
	I0916 11:09:50.129118 1488539 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 11:09:50.129138 1488539 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 11:09:50.129143 1488539 command_runner.go:130] > # changing them here.
	I0916 11:09:50.129147 1488539 command_runner.go:130] > # insecure_registries = [
	I0916 11:09:50.129150 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.129156 1488539 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 11:09:50.129161 1488539 command_runner.go:130] > # ignore; the latter will ignore volumes entirely.
	I0916 11:09:50.129165 1488539 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 11:09:50.129170 1488539 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 11:09:50.129174 1488539 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 11:09:50.129180 1488539 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 11:09:50.129183 1488539 command_runner.go:130] > # CNI plugins.
	I0916 11:09:50.129186 1488539 command_runner.go:130] > [crio.network]
	I0916 11:09:50.129192 1488539 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 11:09:50.129211 1488539 command_runner.go:130] > # CRI-O will pick-up the first one found in network_dir.
	I0916 11:09:50.129216 1488539 command_runner.go:130] > # cni_default_network = ""
	I0916 11:09:50.129225 1488539 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 11:09:50.129229 1488539 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 11:09:50.129235 1488539 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 11:09:50.129240 1488539 command_runner.go:130] > # plugin_dirs = [
	I0916 11:09:50.129243 1488539 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 11:09:50.129270 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.129289 1488539 command_runner.go:130] > # A necessary configuration for Prometheus based metrics retrieval
	I0916 11:09:50.129293 1488539 command_runner.go:130] > [crio.metrics]
	I0916 11:09:50.129298 1488539 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 11:09:50.129303 1488539 command_runner.go:130] > # enable_metrics = false
	I0916 11:09:50.129308 1488539 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 11:09:50.129312 1488539 command_runner.go:130] > # Per default all metrics are enabled.
	I0916 11:09:50.129318 1488539 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 11:09:50.129324 1488539 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 11:09:50.129330 1488539 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 11:09:50.129334 1488539 command_runner.go:130] > # metrics_collectors = [
	I0916 11:09:50.129337 1488539 command_runner.go:130] > # 	"operations",
	I0916 11:09:50.129343 1488539 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 11:09:50.129347 1488539 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 11:09:50.129400 1488539 command_runner.go:130] > # 	"operations_errors",
	I0916 11:09:50.129583 1488539 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 11:09:50.129773 1488539 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 11:09:50.129802 1488539 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 11:09:50.129808 1488539 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 11:09:50.129813 1488539 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 11:09:50.129816 1488539 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 11:09:50.129820 1488539 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 11:09:50.129824 1488539 command_runner.go:130] > # 	"containers_oom_total",
	I0916 11:09:50.129828 1488539 command_runner.go:130] > # 	"containers_oom",
	I0916 11:09:50.129832 1488539 command_runner.go:130] > # 	"processes_defunct",
	I0916 11:09:50.129878 1488539 command_runner.go:130] > # 	"operations_total",
	I0916 11:09:50.130001 1488539 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 11:09:50.130170 1488539 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 11:09:50.130178 1488539 command_runner.go:130] > # 	"operations_errors_total",
	I0916 11:09:50.130184 1488539 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 11:09:50.130204 1488539 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 11:09:50.130223 1488539 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 11:09:50.130228 1488539 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 11:09:50.130232 1488539 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 11:09:50.130236 1488539 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 11:09:50.130239 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.130244 1488539 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 11:09:50.130314 1488539 command_runner.go:130] > # metrics_port = 9090
	I0916 11:09:50.130321 1488539 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 11:09:50.130553 1488539 command_runner.go:130] > # metrics_socket = ""
	I0916 11:09:50.130570 1488539 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 11:09:50.130585 1488539 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 11:09:50.130593 1488539 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 11:09:50.130598 1488539 command_runner.go:130] > # certificate on any modification event.
	I0916 11:09:50.130601 1488539 command_runner.go:130] > # metrics_cert = ""
	I0916 11:09:50.130607 1488539 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 11:09:50.130612 1488539 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 11:09:50.130632 1488539 command_runner.go:130] > # metrics_key = ""
	I0916 11:09:50.130640 1488539 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 11:09:50.130643 1488539 command_runner.go:130] > [crio.tracing]
	I0916 11:09:50.130649 1488539 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 11:09:50.130652 1488539 command_runner.go:130] > # enable_tracing = false
	I0916 11:09:50.130658 1488539 command_runner.go:130] > # Address on which the gRPC trace collector listens.
	I0916 11:09:50.130662 1488539 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 11:09:50.130666 1488539 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 11:09:50.130707 1488539 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 11:09:50.130715 1488539 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 11:09:50.130718 1488539 command_runner.go:130] > [crio.stats]
	I0916 11:09:50.130724 1488539 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 11:09:50.130729 1488539 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 11:09:50.130875 1488539 command_runner.go:130] > # stats_collection_period = 0
	I0916 11:09:50.133009 1488539 command_runner.go:130] ! time="2024-09-16 11:09:50.106299699Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 11:09:50.133031 1488539 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
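
	Only a few effective values in the `crio config` dump above matter to the provisioner, notably cgroup_manager = "cgroupfs", conmon_cgroup = "pod", and pause_image. A simplified sketch that scans the output for one uncommented key; a real implementation would use a proper TOML parser, so treat this as an assumption-laden shortcut:

    package main

    import (
        "bufio"
        "fmt"
        "strings"
    )

    // effectiveValue scans `crio config` output for an uncommented
    // `key = "value"` line. Simplified on purpose: it ignores TOML
    // sections and escaping, so treat it as a sketch, not a parser.
    func effectiveValue(config, key string) (string, bool) {
        sc := bufio.NewScanner(strings.NewReader(config))
        for sc.Scan() {
            line := strings.TrimSpace(sc.Text())
            if strings.HasPrefix(line, "#") {
                continue // commented-out default, not an effective setting
            }
            if k, v, ok := strings.Cut(line, "="); ok && strings.TrimSpace(k) == key {
                return strings.Trim(strings.TrimSpace(v), `"`), true
            }
        }
        return "", false
    }

    func main() {
        cfg := "cgroup_manager = \"cgroupfs\"\n# log_level = \"info\"\nconmon_cgroup = \"pod\"\n"
        if v, ok := effectiveValue(cfg, "cgroup_manager"); ok {
            fmt.Println(v) // cgroupfs
        }
    }
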
	I0916 11:09:50.133134 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:09:50.133142 1488539 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:09:50.133151 1488539 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:09:50.133179 1488539 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.67.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-654612 NodeName:multinode-654612 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.67.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.67.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:09:50.133319 1488539 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.67.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "multinode-654612"
	  kubeletExtraArgs:
	    node-ip: 192.168.67.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.67.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
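
	The multi-document kubeadm payload above (InitConfiguration, ClusterConfiguration, KubeletConfiguration, KubeProxyConfiguration, separated by ---) is later written to /var/tmp/minikube/kubeadm.yaml.new. A minimal sketch of splitting such a payload and reporting each document's kind; real parsing would go through a YAML library, so this is only an illustrative shortcut:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        // A stand-in for the multi-document payload above; only the kinds matter here.
        multi := "apiVersion: kubeadm.k8s.io/v1beta3\nkind: InitConfiguration\n---\nkind: ClusterConfiguration\n---\nkind: KubeletConfiguration\n"
        for _, doc := range strings.Split(multi, "\n---\n") {
            for _, line := range strings.Split(doc, "\n") {
                if kind, ok := strings.CutPrefix(line, "kind: "); ok {
                    fmt.Println("found document:", kind)
                }
            }
        }
    }
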
	
	I0916 11:09:50.133399 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:09:50.141869 1488539 command_runner.go:130] > kubeadm
	I0916 11:09:50.141894 1488539 command_runner.go:130] > kubectl
	I0916 11:09:50.141898 1488539 command_runner.go:130] > kubelet
	I0916 11:09:50.143073 1488539 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:09:50.143177 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:09:50.152626 1488539 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (366 bytes)
	I0916 11:09:50.172497 1488539 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:09:50.192328 1488539 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2154 bytes)
	I0916 11:09:50.210914 1488539 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:09:50.214537 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
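
	The bash pipeline above keeps /etc/hosts idempotent: grep -v strips any stale control-plane.minikube.internal line, the echo appends the current IP, and writing to a temp file before the sudo cp keeps the rewrite atomic. The same effect in a hedged Go sketch (upsertHost is a hypothetical helper, not minikube code):

    package main

    import (
        "fmt"
        "strings"
    )

    // upsertHost drops any stale line ending in "\thost" and appends a fresh
    // "IP\thost" entry — the same effect as the grep/echo pipeline above.
    func upsertHost(hosts, ip, host string) string {
        var out []string
        for _, line := range strings.Split(strings.TrimRight(hosts, "\n"), "\n") {
            if !strings.HasSuffix(line, "\t"+host) {
                out = append(out, line)
            }
        }
        out = append(out, ip+"\t"+host)
        return strings.Join(out, "\n") + "\n"
    }

    func main() {
        fmt.Print(upsertHost("127.0.0.1\tlocalhost\n", "192.168.67.2", "control-plane.minikube.internal"))
    }
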
	I0916 11:09:50.225794 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:50.313520 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:09:50.327991 1488539 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.2
	I0916 11:09:50.328015 1488539 certs.go:194] generating shared ca certs ...
	I0916 11:09:50.328041 1488539 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:50.328216 1488539 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:09:50.328272 1488539 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:09:50.328291 1488539 certs.go:256] generating profile certs ...
	I0916 11:09:50.328365 1488539 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key
	I0916 11:09:50.328382 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt with IP's: []
	I0916 11:09:51.208048 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt ...
	I0916 11:09:51.208131 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt: {Name:mk1816cbc9363ecfe161b609b475f722dc15370b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.208371 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key ...
	I0916 11:09:51.208408 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key: {Name:mk78c749df7c64f579f85fe55fa244d192f30537 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.208556 1488539 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e
	I0916 11:09:51.208599 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.67.2]
	I0916 11:09:51.585190 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e ...
	I0916 11:09:51.585227 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e: {Name:mk85af17d14460b81f65bc98a438799a21dcc7e2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.585489 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e ...
	I0916 11:09:51.585507 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e: {Name:mkd172abd2af8e3371642f78522e3ee51cf4c879 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.585602 1488539 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt
	I0916 11:09:51.585697 1488539 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key
	I0916 11:09:51.585760 1488539 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key
	I0916 11:09:51.585779 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt with IP's: []
	I0916 11:09:52.392062 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt ...
	I0916 11:09:52.392097 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt: {Name:mk3f8a946d3a53bec11f22f47ded66010decb891 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:52.392285 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key ...
	I0916 11:09:52.392312 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key: {Name:mk2e493f7cbfb27e5386bd8f92eff0a2de000bf2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:52.392404 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:09:52.392426 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:09:52.392440 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:09:52.392459 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:09:52.392474 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:09:52.392493 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:09:52.392513 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:09:52.392527 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:09:52.392585 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:09:52.392632 1488539 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:09:52.392640 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:09:52.392667 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:09:52.392718 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:09:52.392746 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:09:52.392795 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:09:52.392832 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.392850 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.392862 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.393473 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:09:52.420759 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:09:52.447472 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:09:52.472263 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:09:52.496610 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:09:52.521557 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:09:52.546624 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:09:52.572181 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:09:52.597083 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:09:52.623828 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:09:52.648742 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:09:52.678092 1488539 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
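	The apiserver serving cert generated above was signed for the IPs [10.96.0.1 127.0.0.1 10.0.0.1 192.168.67.2]; once copied to the node it can be double-checked with stock openssl. A sketch, using the destination path from the scp line above:

		# print the SAN extension of the freshly copied apiserver certificate
		sudo openssl x509 -noout -text -in /var/lib/minikube/certs/apiserver.crt \
		  | grep -A1 'Subject Alternative Name'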
	I0916 11:09:52.695974 1488539 ssh_runner.go:195] Run: openssl version
	I0916 11:09:52.701144 1488539 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:09:52.701598 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:09:52.711032 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714481 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714826 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714897 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.721306 1488539 command_runner.go:130] > b5213941
	I0916 11:09:52.721806 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:09:52.731165 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:09:52.741068 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744721 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744762 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744815 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.751341 1488539 command_runner.go:130] > 51391683
	I0916 11:09:52.751749 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:09:52.761375 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:09:52.770588 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774092 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774123 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774189 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.781373 1488539 command_runner.go:130] > 3ec20f2e
	I0916 11:09:52.781816 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
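	The 8-hex-digit link names (b5213941.0, 51391683.0, 3ec20f2e.0) follow OpenSSL's subject-hash lookup convention: `openssl x509 -hash` prints the hash of the certificate subject, and a symlink named `<hash>.0` in /etc/ssl/certs lets the library resolve a CA by subject at verification time. The same dance for one certificate, condensed from the commands above:

		# derive the subject hash, then publish the cert under the name OpenSSL will look for
		HASH=$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)
		sudo ln -fs /etc/ssl/certs/minikubeCA.pem "/etc/ssl/certs/${HASH}.0"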
	I0916 11:09:52.791027 1488539 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:09:52.794279 1488539 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:09:52.794318 1488539 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:09:52.794356 1488539 kubeadm.go:392] StartCluster: {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:52.794436 1488539 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:09:52.794492 1488539 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:09:52.834750 1488539 cri.go:89] found id: ""
	I0916 11:09:52.834863 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:09:52.843934 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0916 11:09:52.843972 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0916 11:09:52.843982 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0916 11:09:52.844097 1488539 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:09:52.853358 1488539 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:09:52.853460 1488539 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:09:52.862655 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0916 11:09:52.862680 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0916 11:09:52.862688 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0916 11:09:52.862910 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:09:52.863920 1488539 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:09:52.863941 1488539 kubeadm.go:157] found existing configuration files:
	
	I0916 11:09:52.863994 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 11:09:52.872703 1488539 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:09:52.872756 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:09:52.872837 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:09:52.881329 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 11:09:52.890064 1488539 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:09:52.890105 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:09:52.890159 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:09:52.898828 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 11:09:52.907846 1488539 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:09:52.907891 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:09:52.907967 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:09:52.916769 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 11:09:52.925409 1488539 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:09:52.925451 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:09:52.925513 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 11:09:52.934105 1488539 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:09:52.980348 1488539 kubeadm.go:310] W0916 11:09:52.979690    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.980422 1488539 command_runner.go:130] ! W0916 11:09:52.979690    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.981213 1488539 kubeadm.go:310] W0916 11:09:52.980671    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.981271 1488539 command_runner.go:130] ! W0916 11:09:52.980671    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:53.001534 1488539 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:09:53.001617 1488539 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:09:53.065728 1488539 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:09:53.065736 1488539 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
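	Both v1beta3 deprecation warnings name their own remedy; a sketch of the suggested migration with the config path used above (the --new-config path here is hypothetical):

		# rewrite the deprecated v1beta3 documents as the newer kubeadm API version
		sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config migrate \
		  --old-config /var/tmp/minikube/kubeadm.yaml \
		  --new-config /var/tmp/minikube/kubeadm-migrated.yaml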
	I0916 11:10:10.854424 1488539 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:10:10.854451 1488539 command_runner.go:130] > [init] Using Kubernetes version: v1.31.1
	I0916 11:10:10.854499 1488539 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:10:10.854505 1488539 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:10:10.854616 1488539 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:10:10.854635 1488539 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:10:10.854694 1488539 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:10:10.854707 1488539 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:10:10.854759 1488539 kubeadm.go:310] OS: Linux
	I0916 11:10:10.854777 1488539 command_runner.go:130] > OS: Linux
	I0916 11:10:10.854835 1488539 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:10:10.854844 1488539 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:10:10.854891 1488539 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:10:10.854899 1488539 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:10:10.854945 1488539 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:10:10.854952 1488539 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:10:10.855000 1488539 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:10:10.855007 1488539 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:10:10.855054 1488539 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:10:10.855061 1488539 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:10:10.855122 1488539 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:10:10.855145 1488539 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:10:10.855190 1488539 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:10:10.855199 1488539 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:10:10.855246 1488539 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:10:10.855254 1488539 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:10:10.855319 1488539 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:10:10.855325 1488539 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:10:10.855401 1488539 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:10:10.855406 1488539 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:10:10.855522 1488539 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:10:10.855536 1488539 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:10:10.855635 1488539 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:10:10.855646 1488539 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:10:10.855711 1488539 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:10:10.855754 1488539 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:10:10.858473 1488539 out.go:235]   - Generating certificates and keys ...
	I0916 11:10:10.858574 1488539 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0916 11:10:10.858585 1488539 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:10:10.858656 1488539 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0916 11:10:10.858665 1488539 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:10:10.858730 1488539 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:10:10.858738 1488539 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:10:10.858794 1488539 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:10:10.858802 1488539 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:10:10.858862 1488539 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0916 11:10:10.858869 1488539 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:10:10.858919 1488539 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0916 11:10:10.858927 1488539 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:10:10.858979 1488539 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0916 11:10:10.858986 1488539 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:10:10.859110 1488539 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859122 1488539 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859173 1488539 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0916 11:10:10.859180 1488539 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:10:10.859299 1488539 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859306 1488539 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859370 1488539 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:10:10.859377 1488539 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:10:10.859440 1488539 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:10:10.859448 1488539 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:10:10.859491 1488539 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0916 11:10:10.859499 1488539 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:10:10.859553 1488539 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:10:10.859561 1488539 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:10:10.859612 1488539 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:10:10.859623 1488539 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:10:10.859680 1488539 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:10:10.859687 1488539 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:10:10.859740 1488539 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:10:10.859749 1488539 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:10:10.859810 1488539 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:10:10.859820 1488539 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:10:10.859886 1488539 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:10:10.859911 1488539 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:10:10.859991 1488539 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:10:10.859998 1488539 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:10:10.860063 1488539 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:10:10.860070 1488539 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:10:10.862753 1488539 out.go:235]   - Booting up control plane ...
	I0916 11:10:10.862923 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:10:10.862935 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:10:10.863063 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:10:10.863073 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:10:10.863141 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:10:10.863150 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:10:10.863257 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:10:10.863267 1488539 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:10:10.863403 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:10:10.863413 1488539 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:10:10.863452 1488539 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:10:10.863459 1488539 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:10:10.863625 1488539 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:10:10.863635 1488539 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:10:10.863736 1488539 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:10:10.863746 1488539 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:10:10.863808 1488539 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 2.001682005s
	I0916 11:10:10.863818 1488539 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 2.001682005s
	I0916 11:10:10.863896 1488539 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:10:10.863907 1488539 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:10:10.863970 1488539 command_runner.go:130] > [api-check] The API server is healthy after 6.001409713s
	I0916 11:10:10.863978 1488539 kubeadm.go:310] [api-check] The API server is healthy after 6.001409713s
	I0916 11:10:10.864108 1488539 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:10:10.864132 1488539 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:10:10.864257 1488539 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:10:10.864262 1488539 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:10:10.864319 1488539 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:10:10.864323 1488539 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:10:10.864510 1488539 command_runner.go:130] > [mark-control-plane] Marking the node multinode-654612 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:10:10.864516 1488539 kubeadm.go:310] [mark-control-plane] Marking the node multinode-654612 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:10:10.864571 1488539 command_runner.go:130] > [bootstrap-token] Using token: b40s63.zibnp7p33t2buer0
	I0916 11:10:10.864576 1488539 kubeadm.go:310] [bootstrap-token] Using token: b40s63.zibnp7p33t2buer0
	I0916 11:10:10.868852 1488539 out.go:235]   - Configuring RBAC rules ...
	I0916 11:10:10.868981 1488539 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:10:10.868995 1488539 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:10:10.869116 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:10:10.869121 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:10:10.869321 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:10:10.869332 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:10:10.869481 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:10:10.869512 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:10:10.869655 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:10:10.869675 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:10:10.869762 1488539 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:10:10.869772 1488539 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:10:10.869889 1488539 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:10:10.869899 1488539 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:10:10.869943 1488539 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:10:10.869950 1488539 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0916 11:10:10.869995 1488539 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:10:10.870002 1488539 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0916 11:10:10.870006 1488539 kubeadm.go:310] 
	I0916 11:10:10.870066 1488539 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:10:10.870072 1488539 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0916 11:10:10.870077 1488539 kubeadm.go:310] 
	I0916 11:10:10.870152 1488539 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:10:10.870159 1488539 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0916 11:10:10.870164 1488539 kubeadm.go:310] 
	I0916 11:10:10.870189 1488539 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:10:10.870196 1488539 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0916 11:10:10.870253 1488539 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:10:10.870260 1488539 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:10:10.870310 1488539 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:10:10.870317 1488539 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:10:10.870321 1488539 kubeadm.go:310] 
	I0916 11:10:10.870375 1488539 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:10:10.870382 1488539 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0916 11:10:10.870387 1488539 kubeadm.go:310] 
	I0916 11:10:10.870434 1488539 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:10:10.870440 1488539 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:10:10.870445 1488539 kubeadm.go:310] 
	I0916 11:10:10.870496 1488539 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:10:10.870503 1488539 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0916 11:10:10.870576 1488539 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:10:10.870583 1488539 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:10:10.870654 1488539 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:10:10.870661 1488539 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:10:10.870664 1488539 kubeadm.go:310] 
	I0916 11:10:10.870747 1488539 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:10:10.870756 1488539 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:10:10.870831 1488539 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:10:10.870838 1488539 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0916 11:10:10.870842 1488539 kubeadm.go:310] 
	I0916 11:10:10.870924 1488539 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.870931 1488539 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871032 1488539 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:10:10.871038 1488539 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:10:10.871058 1488539 kubeadm.go:310] 	--control-plane 
	I0916 11:10:10.871067 1488539 command_runner.go:130] > 	--control-plane 
	I0916 11:10:10.871071 1488539 kubeadm.go:310] 
	I0916 11:10:10.871155 1488539 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:10:10.871162 1488539 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:10:10.871166 1488539 kubeadm.go:310] 
	I0916 11:10:10.871247 1488539 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871253 1488539 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871354 1488539 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:10:10.871360 1488539 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
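	The --discovery-token-ca-cert-hash printed in both join commands is a SHA-256 over the DER-encoded public key of the cluster CA. It can be recomputed on the control plane with the standard openssl pipeline, assuming the certificatesDir from the config above:

		# hash of the CA public key, exactly as joining nodes will pin it
		openssl x509 -pubkey -in /var/lib/minikube/certs/ca.crt \
		  | openssl rsa -pubin -outform der 2>/dev/null \
		  | openssl dgst -sha256 -hex | sed 's/^.* //'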
	I0916 11:10:10.871382 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:10:10.871390 1488539 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:10:10.875799 1488539 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:10:10.878343 1488539 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:10:10.882115 1488539 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0916 11:10:10.882163 1488539 command_runner.go:130] >   Size: 4030506   	Blocks: 7880       IO Block: 4096   regular file
	I0916 11:10:10.882174 1488539 command_runner.go:130] > Device: 36h/54d	Inode: 1574378     Links: 1
	I0916 11:10:10.882181 1488539 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:10:10.882187 1488539 command_runner.go:130] > Access: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:10:10.882192 1488539 command_runner.go:130] > Modify: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:10:10.882197 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.752507709 +0000
	I0916 11:10:10.882201 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.704509024 +0000
	I0916 11:10:10.882316 1488539 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:10:10.882324 1488539 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:10:10.902078 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:10:11.153612 1488539 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0916 11:10:11.164125 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0916 11:10:11.173732 1488539 command_runner.go:130] > serviceaccount/kindnet created
	I0916 11:10:11.188063 1488539 command_runner.go:130] > daemonset.apps/kindnet created
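	With the kindnet ClusterRole, binding, ServiceAccount, and DaemonSet applied, CNI readiness reduces to a DaemonSet rollout; a sketch of watching it with the same staged kubectl and kubeconfig:

		# block until every node runs a ready kindnet pod (or the timeout fires)
		sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
		  -n kube-system rollout status daemonset kindnet --timeout=120s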
	I0916 11:10:11.191784 1488539 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:10:11.191917 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.192009 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-654612 minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-654612 minikube.k8s.io/primary=true
	I0916 11:10:11.205670 1488539 command_runner.go:130] > -16
	I0916 11:10:11.205950 1488539 ops.go:34] apiserver oom_adj: -16
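	The oom_adj file read here is the legacy /proc interface (range -17..15, where -17 disables OOM kills); current kernels expose the equivalent oom_score_adj on a -1000..1000 scale, readable the same way:

		# modern equivalent of the check above
		cat /proc/$(pgrep kube-apiserver)/oom_score_adj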
	I0916 11:10:11.325674 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0916 11:10:11.330218 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.335730 1488539 command_runner.go:130] > node/multinode-654612 labeled
	I0916 11:10:11.453794 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:11.830372 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.920614 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:12.331316 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:12.416556 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:12.831139 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:12.914763 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:13.330338 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:13.426637 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:13.830893 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:13.917716 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:14.330987 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:14.461637 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:14.830246 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:14.929204 1488539 command_runner.go:130] > NAME      SECRETS   AGE
	I0916 11:10:14.929233 1488539 command_runner.go:130] > default   0         0s
	I0916 11:10:14.932893 1488539 kubeadm.go:1113] duration metric: took 3.741020617s to wait for elevateKubeSystemPrivileges
	I0916 11:10:14.932920 1488539 kubeadm.go:394] duration metric: took 22.138566358s to StartCluster
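	The 3.7s wait above is a retry loop: one `kubectl get sa default` roughly every 500ms until the token controller materializes the ServiceAccount. The same wait as a plain shell loop, with the staged binary and kubeconfig:

		# poll until the "default" ServiceAccount exists in the default namespace
		until sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
		    get serviceaccount default >/dev/null 2>&1; do
		  sleep 0.5
		done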
	I0916 11:10:14.932938 1488539 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:10:14.933003 1488539 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:14.933675 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:10:14.933861 1488539 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:10:14.934025 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:10:14.934265 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:10:14.934298 1488539 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:10:14.934358 1488539 addons.go:69] Setting storage-provisioner=true in profile "multinode-654612"
	I0916 11:10:14.934373 1488539 addons.go:234] Setting addon storage-provisioner=true in "multinode-654612"
	I0916 11:10:14.934396 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:10:14.934899 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.935368 1488539 addons.go:69] Setting default-storageclass=true in profile "multinode-654612"
	I0916 11:10:14.935397 1488539 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-654612"
	I0916 11:10:14.935714 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.937810 1488539 out.go:177] * Verifying Kubernetes components...
	I0916 11:10:14.942180 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:10:14.982007 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:14.982298 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:14.982887 1488539 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:10:14.983143 1488539 addons.go:234] Setting addon default-storageclass=true in "multinode-654612"
	I0916 11:10:14.983177 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:10:14.983606 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.991099 1488539 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:10:14.994064 1488539 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:10:14.994089 1488539 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:10:14.994163 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:10:15.010146 1488539 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:10:15.010178 1488539 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:10:15.010259 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:10:15.039435 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:10:15.056948 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:10:15.216075 1488539 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:10:15.282816 1488539 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:10:15.336866 1488539 command_runner.go:130] > apiVersion: v1
	I0916 11:10:15.336939 1488539 command_runner.go:130] > data:
	I0916 11:10:15.336974 1488539 command_runner.go:130] >   Corefile: |
	I0916 11:10:15.336996 1488539 command_runner.go:130] >     .:53 {
	I0916 11:10:15.337016 1488539 command_runner.go:130] >         errors
	I0916 11:10:15.337051 1488539 command_runner.go:130] >         health {
	I0916 11:10:15.337073 1488539 command_runner.go:130] >            lameduck 5s
	I0916 11:10:15.337089 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337109 1488539 command_runner.go:130] >         ready
	I0916 11:10:15.337148 1488539 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0916 11:10:15.337170 1488539 command_runner.go:130] >            pods insecure
	I0916 11:10:15.337189 1488539 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0916 11:10:15.337222 1488539 command_runner.go:130] >            ttl 30
	I0916 11:10:15.337247 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337268 1488539 command_runner.go:130] >         prometheus :9153
	I0916 11:10:15.337301 1488539 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0916 11:10:15.337322 1488539 command_runner.go:130] >            max_concurrent 1000
	I0916 11:10:15.337338 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337357 1488539 command_runner.go:130] >         cache 30
	I0916 11:10:15.337387 1488539 command_runner.go:130] >         loop
	I0916 11:10:15.337410 1488539 command_runner.go:130] >         reload
	I0916 11:10:15.337467 1488539 command_runner.go:130] >         loadbalance
	I0916 11:10:15.337492 1488539 command_runner.go:130] >     }
	I0916 11:10:15.337510 1488539 command_runner.go:130] > kind: ConfigMap
	I0916 11:10:15.337541 1488539 command_runner.go:130] > metadata:
	I0916 11:10:15.337568 1488539 command_runner.go:130] >   creationTimestamp: "2024-09-16T11:10:10Z"
	I0916 11:10:15.337588 1488539 command_runner.go:130] >   name: coredns
	I0916 11:10:15.337620 1488539 command_runner.go:130] >   namespace: kube-system
	I0916 11:10:15.337644 1488539 command_runner.go:130] >   resourceVersion: "229"
	I0916 11:10:15.337663 1488539 command_runner.go:130] >   uid: 0328fa4b-2cc5-463c-941b-8ca226ae16f8
	I0916 11:10:15.341864 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.67.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:10:15.341998 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:10:15.894297 1488539 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0916 11:10:15.904577 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0916 11:10:15.915984 1488539 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:10:15.925831 1488539 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:10:15.934179 1488539 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0916 11:10:15.945567 1488539 command_runner.go:130] > pod/storage-provisioner created
	I0916 11:10:15.950869 1488539 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0916 11:10:15.950940 1488539 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:10:15.950955 1488539 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:10:15.951046 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 11:10:15.951052 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.951060 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.951063 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.951263 1488539 command_runner.go:130] > configmap/coredns replaced
	I0916 11:10:15.951283 1488539 start.go:971] {"host.minikube.internal": 192.168.67.1} host record injected into CoreDNS's ConfigMap
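The replace above is how minikube makes host.minikube.internal resolvable in-cluster: it reads the coredns ConfigMap, splices a hosts block (192.168.67.1 host.minikube.internal, with fallthrough) in front of the forward stanza and a log directive ahead of errors, then writes the ConfigMap back. A client-go sketch of the same edit (simplified; as the log shows, minikube actually does it with sed piped through kubectl replace):

package main

import (
	"context"
	"strings"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// injectHostRecord splices a hosts block for host.minikube.internal into the
// CoreDNS Corefile ahead of the "forward" plugin, then updates the ConfigMap.
func injectHostRecord(cs *kubernetes.Clientset, hostIP string) error {
	ctx := context.TODO()
	cm, err := cs.CoreV1().ConfigMaps("kube-system").Get(ctx, "coredns", metav1.GetOptions{})
	if err != nil {
		return err
	}
	hosts := "        hosts {\n           " + hostIP + " host.minikube.internal\n           fallthrough\n        }\n"
	cm.Data["Corefile"] = strings.Replace(cm.Data["Corefile"], "        forward .", hosts+"        forward .", 1)
	_, err = cs.CoreV1().ConfigMaps("kube-system").Update(ctx, cm, metav1.UpdateOptions{})
	return err
}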
	I0916 11:10:15.951686 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:15.951943 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:15.952252 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:15.952260 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.952268 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.952273 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.952922 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:15.953282 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:15.953603 1488539 node_ready.go:35] waiting up to 6m0s for node "multinode-654612" to be "Ready" ...
	I0916 11:10:15.953718 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:15.953753 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.953776 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.953794 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.986653 1488539 round_trippers.go:574] Response Status: 200 OK in 32 milliseconds
	I0916 11:10:15.986676 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.986685 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.986689 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.986694 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.986697 1488539 round_trippers.go:580]     Audit-Id: 9e3a4fce-4c3f-4ee7-b99a-11967743a7b9
	I0916 11:10:15.986700 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.986703 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.986898 1488539 round_trippers.go:574] Response Status: 200 OK in 34 milliseconds
	I0916 11:10:15.986928 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.986938 1488539 round_trippers.go:580]     Audit-Id: 772c45c3-c98f-43c3-aef5-dab4ae1bfe33
	I0916 11:10:15.986942 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.986945 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.986948 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.986951 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.986955 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:15.986958 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.986980 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"348","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":2},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:15.987027 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:15.987418 1488539 request.go:1351] Request Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"348","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:15.987477 1488539 round_trippers.go:463] PUT https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:15.987488 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.987497 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.987505 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.987509 1488539 round_trippers.go:473]     Content-Type: application/json
	I0916 11:10:15.989002 1488539 round_trippers.go:574] Response Status: 200 OK in 37 milliseconds
	I0916 11:10:15.989022 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.989029 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.989033 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.989036 1488539 round_trippers.go:580]     Content-Length: 1273
	I0916 11:10:15.989039 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.989042 1488539 round_trippers.go:580]     Audit-Id: 42c79c11-4fd7-43ef-b1ad-fb91fa16b6c4
	I0916 11:10:15.989045 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.989056 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.989118 1488539 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"356"},"items":[{"metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 11:10:15.989474 1488539 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:10:15.989519 1488539 round_trippers.go:463] PUT https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 11:10:15.989525 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.989533 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.989538 1488539 round_trippers.go:473]     Content-Type: application/json
	I0916 11:10:15.989541 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.993468 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:15.993488 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.993529 1488539 round_trippers.go:580]     Audit-Id: f2821cf8-2c2a-4b57-81bb-17f287d257d6
	I0916 11:10:15.993535 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.993538 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.993541 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.993543 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.993546 1488539 round_trippers.go:580]     Content-Length: 1220
	I0916 11:10:15.993549 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.993582 1488539 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:10:15.997988 1488539 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:10:16.000565 1488539 addons.go:510] duration metric: took 1.066255785s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:10:16.001134 1488539 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 11:10:16.001159 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.001168 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:16.001172 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.001174 1488539 round_trippers.go:580]     Audit-Id: 6a8e0e97-01ec-42cc-bd8d-fa8cb222517d
	I0916 11:10:16.001177 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.001180 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.001183 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.001185 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.001213 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"357","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:16.452910 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:16.452938 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.452948 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.452952 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.454229 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:16.454250 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.454259 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.454263 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.455335 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.455360 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.455368 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:16.455372 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.455376 1488539 round_trippers.go:580]     Audit-Id: 5d43ac4c-8fd6-40a7-a385-8096fc959cb8
	I0916 11:10:16.455379 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.455382 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.455385 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.455388 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.459002 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"370","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":1,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:16.459118 1488539 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-654612" context rescaled to 1 replicas
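The rescale above goes through the deployment's scale subresource, which is what the PUT .../deployments/coredns/scale carries: spec.replicas drops from 2 to 1 and the status catches up one poll later (resourceVersion 357 to 370). Roughly, in client-go terms (a sketch, not minikube's kapi.go):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// rescaleCoreDNS sets the replica count of kube-system/coredns via the
// deployment's scale subresource, mirroring the GET/PUT pair in the log.
func rescaleCoreDNS(cs *kubernetes.Clientset, replicas int32) error {
	ctx := context.TODO()
	scale, err := cs.AppsV1().Deployments("kube-system").GetScale(ctx, "coredns", metav1.GetOptions{})
	if err != nil {
		return err
	}
	scale.Spec.Replicas = replicas
	if _, err := cs.AppsV1().Deployments("kube-system").UpdateScale(ctx, "coredns", scale, metav1.UpdateOptions{}); err != nil {
		return err
	}
	fmt.Printf("coredns rescaled to %d replica(s)\n", replicas)
	return nil
}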
	I0916 11:10:16.463388 1488539 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:10:16.463409 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.463417 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.463422 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.463426 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.463430 1488539 round_trippers.go:580]     Audit-Id: 0218f56f-e80a-48de-b984-ca95fd447639
	I0916 11:10:16.463433 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.463436 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.464022 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:16.954221 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:16.954245 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.954254 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.954258 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.956643 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.956752 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.956778 1488539 round_trippers.go:580]     Audit-Id: a08dc693-1630-468a-9c24-9b04c1268c2f
	I0916 11:10:16.956796 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.956830 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.956853 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.956872 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.956889 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.957042 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.454418 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:17.454444 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:17.454453 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.454460 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.456531 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.456558 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:17.456567 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:17.456572 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.456577 1488539 round_trippers.go:580]     Audit-Id: de55e2e4-5a2b-4c91-800e-25dff7c49dd0
	I0916 11:10:17.456579 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.456582 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.456585 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:17.456879 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.953920 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:17.953946 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:17.953960 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.953964 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.956789 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.956816 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:17.956824 1488539 round_trippers.go:580]     Audit-Id: 0ec9fcd8-8ee1-4a01-a556-ea935e3cae51
	I0916 11:10:17.956829 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.956834 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.956837 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:17.956840 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:17.956843 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.957410 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.957851 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
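From here the log settles into the readiness poll: every ~500ms the Node object is re-fetched and its conditions inspected until Ready flips to True or the 6-minute budget runs out. The loop is essentially the following (a sketch; minikube's node_ready.go differs in details):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// waitNodeReady polls the named Node until its Ready condition is True,
// giving up once the timeout elapses.
func waitNodeReady(cs *kubernetes.Clientset, name string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		node, err := cs.CoreV1().Nodes().Get(context.TODO(), name, metav1.GetOptions{})
		if err == nil {
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
					return nil
				}
			}
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("node %q did not become Ready within %s", name, timeout)
}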
	I0916 11:10:18.454209 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:18.454234 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:18.454244 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.454249 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.456484 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.456506 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:18.456516 1488539 round_trippers.go:580]     Audit-Id: aaeb0cf1-23c4-44fa-87ba-7b762493099f
	I0916 11:10:18.456521 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.456525 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.456527 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:18.456530 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:18.456533 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.456723 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:18.953870 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:18.953897 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:18.953907 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.953914 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.956511 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.956537 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:18.956548 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:18.956554 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.956557 1488539 round_trippers.go:580]     Audit-Id: eb4746f8-fc2d-4e60-a90f-8db6804ccdfa
	I0916 11:10:18.956560 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.956563 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.956601 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:18.957195 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.454444 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:19.454526 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:19.454541 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.454545 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.457018 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.457041 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:19.457049 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:19.457053 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:19.457057 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.457061 1488539 round_trippers.go:580]     Audit-Id: 75e5924f-5e80-421e-8c03-0e7c8d5e1999
	I0916 11:10:19.457064 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.457066 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.457273 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.954857 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:19.954884 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:19.954894 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.954900 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.957120 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.957143 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:19.957152 1488539 round_trippers.go:580]     Audit-Id: 7819e35d-efd6-4772-bcf8-b67acc5e845f
	I0916 11:10:19.957183 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.957195 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.957199 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:19.957202 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:19.957208 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.957456 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.957885 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:20.454664 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:20.454699 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:20.454708 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.454779 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.457204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.457229 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:20.457238 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.457244 1488539 round_trippers.go:580]     Audit-Id: 73a4dccc-dd39-4644-831b-8b526d28bea4
	I0916 11:10:20.457247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.457250 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.457253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:20.457256 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:20.457949 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:20.954019 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:20.954042 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:20.954052 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.954057 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.956589 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.956664 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:20.956709 1488539 round_trippers.go:580]     Audit-Id: a9aecfc3-86c9-4513-9fba-891a05e4f329
	I0916 11:10:20.956729 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.956746 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.956764 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:20.956796 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:20.956814 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.956960 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:21.454622 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:21.454649 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:21.454657 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.454661 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.456893 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.456922 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:21.456932 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.456936 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:21.456939 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:21.456948 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.456951 1488539 round_trippers.go:580]     Audit-Id: 28eecb8a-5628-45ac-9b56-8267a72e6ddb
	I0916 11:10:21.456954 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.457213 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:21.953902 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:21.953925 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:21.953952 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.953957 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.956189 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.956216 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:21.956226 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.956230 1488539 round_trippers.go:580]     Audit-Id: 82cbcf3e-7b6b-4014-82ec-5ea14697ef6f
	I0916 11:10:21.956234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.956237 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.956240 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:21.956245 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:21.956577 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:22.453921 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:22.453949 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:22.453959 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.453965 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.456257 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.456289 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:22.456298 1488539 round_trippers.go:580]     Audit-Id: 4628a911-12f8-4994-9189-bfc4438a271c
	I0916 11:10:22.456302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.456307 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.456313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:22.456319 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:22.456322 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.456533 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:22.457033 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:22.953892 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:22.953919 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:22.953934 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.953940 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.956452 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.956477 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:22.956486 1488539 round_trippers.go:580]     Audit-Id: 60290daf-b57e-4d57-9d58-69549d9cc092
	I0916 11:10:22.956490 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.956492 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.956495 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:22.956498 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:22.956504 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.956688 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:23.454765 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:23.454792 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:23.454802 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.454806 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.457387 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.457422 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:23.457431 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:23.457437 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:23.457441 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.457445 1488539 round_trippers.go:580]     Audit-Id: f50118d4-4ef1-47a2-a00f-fb0147e0a79b
	I0916 11:10:23.457448 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.457451 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.457680 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:23.954750 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:23.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:23.954788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.954792 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.957410 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.957436 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:23.957445 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.957451 1488539 round_trippers.go:580]     Audit-Id: 9e6cdacf-6b86-4c8e-a9e9-f8f3c8786206
	I0916 11:10:23.957455 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.957458 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.957466 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:23.957470 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:23.957645 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.453867 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:24.453895 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:24.453905 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.453911 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.456090 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.456112 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:24.456121 1488539 round_trippers.go:580]     Audit-Id: 7982c331-9512-43dc-82f4-1937e1dcd231
	I0916 11:10:24.456125 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.456127 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.456130 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:24.456139 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:24.456142 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.456499 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.953996 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:24.954022 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:24.954035 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.954043 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.956266 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.956292 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:24.956301 1488539 round_trippers.go:580]     Audit-Id: 099a6a58-ad29-4990-9301-42bc5be58cf7
	I0916 11:10:24.956306 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.956310 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.956313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:24.956316 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:24.956319 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.956727 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.957146 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:25.453976 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:25.453998 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:25.454008 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.454013 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.456720 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.456745 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:25.456755 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:25.456758 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:25.456763 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.456766 1488539 round_trippers.go:580]     Audit-Id: 3f6bb556-4d99-4bd2-9039-c81c4c188c6f
	I0916 11:10:25.456768 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.456771 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.457092 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:25.954130 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:25.954157 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:25.954167 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.954173 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.956441 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.956464 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:25.956473 1488539 round_trippers.go:580]     Audit-Id: 635af718-7cf0-46dc-92b4-8f200ba9346a
	I0916 11:10:25.956478 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.956482 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.956484 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:25.956487 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:25.956490 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.956643 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.454769 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:26.454795 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:26.454805 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.454812 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.457242 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.457266 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:26.457274 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.457278 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:26.457281 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:26.457284 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.457287 1488539 round_trippers.go:580]     Audit-Id: 38a39ce6-fa7d-4cba-a1f1-77d5b9529a79
	I0916 11:10:26.457289 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.457461 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.953874 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:26.953905 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:26.953915 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.953919 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.956593 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.956616 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:26.956625 1488539 round_trippers.go:580]     Audit-Id: f22cff62-e9f0-4c41-b3ed-1d650690e546
	I0916 11:10:26.956631 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.956634 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.956636 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:26.956639 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:26.956643 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.957034 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.957443 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:27.454436 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:27.454463 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:27.454472 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.454476 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.456707 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.456731 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:27.456739 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.456744 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.456747 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:27.456751 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:27.456753 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.456756 1488539 round_trippers.go:580]     Audit-Id: 83625c4a-ddd0-4eeb-a19b-721f0adc4041
	I0916 11:10:27.456938 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:27.954777 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:27.954804 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:27.954814 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.954826 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.957246 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.957273 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:27.957282 1488539 round_trippers.go:580]     Audit-Id: b2e14cc4-3d7f-4812-9b03-a61a349d7c1a
	I0916 11:10:27.957287 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.957291 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.957296 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:27.957299 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:27.957302 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.957475 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:28.454655 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:28.454681 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:28.454691 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.454696 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.456804 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.456834 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:28.456849 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.456853 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.456856 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:28.456862 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:28.456865 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.456868 1488539 round_trippers.go:580]     Audit-Id: 15f64c0f-3941-43d2-a355-7fb0b63ae448
	I0916 11:10:28.457031 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:28.954254 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:28.954282 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:28.954294 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.954298 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.956564 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.956585 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:28.956594 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:28.956599 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.956604 1488539 round_trippers.go:580]     Audit-Id: 94f05532-f47d-43e3-94c3-a1107b545001
	I0916 11:10:28.956608 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.956610 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.956613 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:28.956750 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:29.453829 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:29.453855 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:29.453865 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.453871 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.456158 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.456176 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:29.456184 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:29.456189 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:29.456193 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.456195 1488539 round_trippers.go:580]     Audit-Id: 8756fcb0-0726-4c33-b8c7-3691031c3fd6
	I0916 11:10:29.456198 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.456201 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.456315 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:29.456747 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:29.954755 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:29.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:29.954788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.954792 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.957084 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.957109 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:29.957117 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:29.957123 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.957127 1488539 round_trippers.go:580]     Audit-Id: 5864a457-0382-4a0f-86f6-4cbc2cd499aa
	I0916 11:10:29.957130 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.957133 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.957135 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:29.957282 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:30.453900 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:30.453931 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:30.453940 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.453943 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.456268 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.456290 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:30.456297 1488539 round_trippers.go:580]     Audit-Id: 8dc6a3d4-07b5-42ed-869e-3ec3585a0519
	I0916 11:10:30.456302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.456306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.456309 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:30.456313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:30.456316 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.456478 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:30.953858 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:30.953886 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:30.953897 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.953901 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.956294 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.956327 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:30.956337 1488539 round_trippers.go:580]     Audit-Id: f8bcd0cb-6149-499f-bc23-c5dd1661fd48
	I0916 11:10:30.956341 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.956345 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.956347 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:30.956350 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:30.956354 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.956446 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:31.453915 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:31.453944 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:31.453953 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.453962 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.456752 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.456777 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:31.456785 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.456789 1488539 round_trippers.go:580]     Audit-Id: b2a2d11b-0c49-46d6-bbcf-5df4219fb1a7
	I0916 11:10:31.456793 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.456796 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.456799 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:31.456803 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:31.456907 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:31.457315 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:31.954007 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:31.954032 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:31.954041 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.954046 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.956204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.956225 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:31.956234 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.956240 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:31.956243 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:31.956247 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.956250 1488539 round_trippers.go:580]     Audit-Id: 44a46614-798d-45ac-9930-ced5ca7b9b04
	I0916 11:10:31.956252 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.956349 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:32.454550 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:32.454574 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:32.454585 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.454589 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.456723 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.456743 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:32.456752 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:32.456758 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.456761 1488539 round_trippers.go:580]     Audit-Id: 83523ae0-0b20-4ae0-9588-50ed90df9dbf
	I0916 11:10:32.456763 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.456766 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.456768 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:32.456886 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:32.954750 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:32.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:32.954789 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.954793 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.957167 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.957200 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:32.957210 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:32.957214 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.957219 1488539 round_trippers.go:580]     Audit-Id: 6828c841-94f3-4a64-a205-c1a3a98e2fc3
	I0916 11:10:32.957223 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.957226 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.957229 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:32.957449 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.453826 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:33.453855 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:33.453864 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.453868 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.456083 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.456104 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:33.456112 1488539 round_trippers.go:580]     Audit-Id: 825871bf-2572-4e81-a83a-4b9af44eccbf
	I0916 11:10:33.456117 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.456120 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.456122 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:33.456125 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:33.456128 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.456248 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.954551 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:33.954575 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:33.954584 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.954588 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.956833 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.956860 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:33.956870 1488539 round_trippers.go:580]     Audit-Id: 32cb9e28-36ce-4705-a8c6-a8f111a0d358
	I0916 11:10:33.956876 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.956879 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.956881 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:33.956885 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:33.956888 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.957141 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.957589 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:34.453846 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:34.453873 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:34.453881 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.453886 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.456204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.456226 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:34.456234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.456239 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.456244 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:34.456248 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:34.456252 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.456255 1488539 round_trippers.go:580]     Audit-Id: 7d8dc698-149f-497a-ba4c-cbf5fecfe310
	I0916 11:10:34.456586 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:34.954479 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:34.954506 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:34.954513 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.954518 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.956971 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.956993 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:34.957001 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:34.957006 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.957012 1488539 round_trippers.go:580]     Audit-Id: d2ab8967-9264-49b7-baf4-3b8f264fcbed
	I0916 11:10:34.957015 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.957018 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.957021 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:34.957182 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:35.454397 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:35.454426 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:35.454435 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.454441 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.456636 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.456660 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:35.456670 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:35.456690 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:35.456695 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.456699 1488539 round_trippers.go:580]     Audit-Id: 452ecf05-a7a2-4831-b9a3-f685239a10cf
	I0916 11:10:35.456703 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.456707 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.457070 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:35.953944 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:35.953971 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:35.953980 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.953985 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.956192 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.956213 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:35.956222 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:35.956226 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.956230 1488539 round_trippers.go:580]     Audit-Id: f5745e87-a54a-46e1-928f-dc8350a54a11
	I0916 11:10:35.956234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.956236 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.956239 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:35.956498 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:36.454192 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:36.454220 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:36.454229 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.454233 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.456427 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.456455 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:36.456464 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.456468 1488539 round_trippers.go:580]     Audit-Id: aecd9d72-ac8e-4750-a552-3e6018dad1b8
	I0916 11:10:36.456471 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.456474 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.456476 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:36.456511 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:36.456761 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:36.457201 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:36.954113 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:36.954166 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:36.954177 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.954183 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.956739 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.956769 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:36.956778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:36.956783 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:36.956786 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.956789 1488539 round_trippers.go:580]     Audit-Id: f605be91-7b12-4259-901e-da485de36443
	I0916 11:10:36.956792 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.956795 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.957208 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:37.454772 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:37.454799 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:37.454809 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.454813 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.457033 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.457059 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:37.457068 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.457073 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:37.457076 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:37.457080 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.457085 1488539 round_trippers.go:580]     Audit-Id: 588dec0a-70e7-4cd3-9836-3fe9dac07d3a
	I0916 11:10:37.457089 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.457365 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:37.954609 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:37.954633 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:37.954648 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.954652 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.956881 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.956902 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:37.956911 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.956916 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.956919 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:37.956924 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:37.956928 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.956931 1488539 round_trippers.go:580]     Audit-Id: 8b114895-b061-4441-b3f3-3c26f979c752
	I0916 11:10:37.957164 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:38.454518 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:38.454547 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:38.454556 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.454562 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.456844 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.456869 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:38.456879 1488539 round_trippers.go:580]     Audit-Id: 5e33d479-1bb8-4e8f-ad9b-645da49502f1
	I0916 11:10:38.456883 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.456887 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.456890 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:38.456894 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:38.456898 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.457254 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:38.457715 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:38.954451 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:38.954473 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:38.954482 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.954486 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.956796 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.956822 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:38.956829 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:38.956834 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:38.956838 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.956842 1488539 round_trippers.go:580]     Audit-Id: cf44a1ae-9735-4261-bb6c-36c582320b3f
	I0916 11:10:38.956845 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.956847 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.957111 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:39.453861 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:39.453885 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:39.453895 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:39.453899 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:39.456133 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:39.456153 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:39.456161 1488539 round_trippers.go:580]     Audit-Id: b4f84c82-69f2-4cbe-8ba7-ecae49a96241
	I0916 11:10:39.456165 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:39.456168 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:39.456171 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:39.456174 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:39.456177 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:39 GMT
	I0916 11:10:39.456345 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:39.954268 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:39.954295 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:39.954305 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:39.954310 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:39.956537 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:39.956557 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:39.956564 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:39.956568 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:39.956571 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:39.956575 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:39.956578 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:39 GMT
	I0916 11:10:39.956581 1488539 round_trippers.go:580]     Audit-Id: bd8e9144-eb30-4447-a21f-96dc71d7be9a
	I0916 11:10:39.956706 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.454601 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:40.454629 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:40.454638 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:40.454645 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:40.456906 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:40.456933 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:40.456941 1488539 round_trippers.go:580]     Audit-Id: 5de4c2e5-838b-47c8-b053-883675b14f77
	I0916 11:10:40.456946 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:40.456949 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:40.456952 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:40.456955 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:40.456958 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:40 GMT
	I0916 11:10:40.457152 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.954085 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:40.954110 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:40.954120 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:40.954124 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:40.956204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:40.956233 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:40.956241 1488539 round_trippers.go:580]     Audit-Id: 0d400ef8-dd42-4c22-b85c-28e17835cef6
	I0916 11:10:40.956247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:40.956251 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:40.956253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:40.956257 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:40.956260 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:40 GMT
	I0916 11:10:40.956538 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.956961 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:41.454376 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:41.454401 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:41.454410 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:41.454415 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:41.456648 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:41.456668 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:41.456692 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:41.456699 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:41.456705 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:41.456709 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:41 GMT
	I0916 11:10:41.456711 1488539 round_trippers.go:580]     Audit-Id: 31d7865b-c626-49b1-8777-2d59ef2660c6
	I0916 11:10:41.456714 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:41.456867 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:41.954562 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:41.954595 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:41.954606 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:41.954610 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:41.956900 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:41.956926 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:41.956935 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:41 GMT
	I0916 11:10:41.956940 1488539 round_trippers.go:580]     Audit-Id: 2a89490e-2657-4b4b-9841-c065c25ed297
	I0916 11:10:41.956952 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:41.956959 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:41.956963 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:41.956966 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:41.957339 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:42.454494 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:42.454523 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:42.454533 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:42.454539 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:42.456791 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:42.456814 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:42.456823 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:42.456826 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:42.456829 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:42.456831 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:42.456835 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:42 GMT
	I0916 11:10:42.456837 1488539 round_trippers.go:580]     Audit-Id: e7c7ee62-5779-4a3c-80f6-f40491c3d515
	I0916 11:10:42.457073 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:42.954036 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:42.954064 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:42.954074 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:42.954080 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:42.956309 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:42.956331 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:42.956339 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:42 GMT
	I0916 11:10:42.956343 1488539 round_trippers.go:580]     Audit-Id: 4459c841-84e8-4dde-9686-219b2243ca24
	I0916 11:10:42.956346 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:42.956348 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:42.956351 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:42.956355 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:42.956466 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:43.454455 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:43.454485 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:43.454495 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:43.454500 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:43.456666 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:43.456707 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:43.456716 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:43.456721 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:43.456724 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:43.456728 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:43 GMT
	I0916 11:10:43.456733 1488539 round_trippers.go:580]     Audit-Id: 78f05987-9e1a-41bd-92e3-ee1834158a8e
	I0916 11:10:43.456736 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:43.456859 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:43.457261 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:43.953867 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:43.953889 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:43.953899 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:43.953903 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:43.956083 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:43.956103 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:43.956112 1488539 round_trippers.go:580]     Audit-Id: 22dae938-c42b-4542-82cd-7c6df22931f4
	I0916 11:10:43.956116 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:43.956121 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:43.956125 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:43.956128 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:43.956132 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:43 GMT
	I0916 11:10:43.956226 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:44.453937 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:44.453963 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:44.453973 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:44.453979 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:44.462029 1488539 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:10:44.462057 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:44.462066 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:44.462071 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:44.462076 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:44 GMT
	I0916 11:10:44.462079 1488539 round_trippers.go:580]     Audit-Id: 9ea6e98d-a982-4163-a307-da5249fe15bf
	I0916 11:10:44.462082 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:44.462096 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:44.462235 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:44.954698 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:44.954743 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:44.954753 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:44.954759 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:44.957049 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:44.957078 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:44.957087 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:44.957091 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:44.957094 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:44.957096 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:44.957099 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:44 GMT
	I0916 11:10:44.957101 1488539 round_trippers.go:580]     Audit-Id: ff25ec64-5017-4a54-890b-2e595e356223
	I0916 11:10:44.957213 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:45.454479 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:45.454508 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:45.454518 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:45.454524 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:45.456695 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:45.456722 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:45.456732 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:45.456736 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:45.456739 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:45.456742 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:45.456747 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:45 GMT
	I0916 11:10:45.456749 1488539 round_trippers.go:580]     Audit-Id: 693b3b47-85ef-47b6-885f-dd693aed574f
	I0916 11:10:45.456892 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:45.457311 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:45.954043 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:45.954068 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:45.954078 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:45.954084 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:45.957652 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:45.957674 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:45.957683 1488539 round_trippers.go:580]     Audit-Id: b824005c-69c0-4c3a-907f-3f16011a1e5c
	I0916 11:10:45.957689 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:45.957692 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:45.957695 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:45.957698 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:45.957701 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:45 GMT
	I0916 11:10:45.957840 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:46.454664 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:46.454690 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:46.454700 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.454709 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.457016 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.457046 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:46.457054 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:46.457059 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.457064 1488539 round_trippers.go:580]     Audit-Id: fd144db7-5071-4279-ab99-caeed8674348
	I0916 11:10:46.457067 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.457069 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.457072 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:46.457192 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:46.953895 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:46.953918 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:46.953927 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.953933 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.956227 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.956247 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:46.956256 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.956260 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:46.956264 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:46.956267 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.956270 1488539 round_trippers.go:580]     Audit-Id: 779ccb12-dfeb-4a26-866b-bffb37c566fe
	I0916 11:10:46.956273 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.956420 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:47.453922 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:47.453950 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:47.453960 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.453965 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.457353 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:47.457377 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:47.457385 1488539 round_trippers.go:580]     Audit-Id: a28f7485-9256-4d3d-ba7e-be6f7835fcf2
	I0916 11:10:47.457391 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.457396 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.457398 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:47.457401 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:47.457404 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.457568 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:47.457968 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:47.954711 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:47.954731 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:47.954742 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.954746 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.957369 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:47.957419 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:47.957432 1488539 round_trippers.go:580]     Audit-Id: 8163da0b-6b56-4887-87ec-939a1c745304
	I0916 11:10:47.957438 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.957441 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.957444 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:47.957447 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:47.957450 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.957558 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:48.454695 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:48.454723 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:48.454732 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.454737 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.457178 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:48.457212 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:48.457222 1488539 round_trippers.go:580]     Audit-Id: 40eedab5-72d6-40f0-abd7-f7d30f572f63
	I0916 11:10:48.457228 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.457231 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.457234 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:48.457242 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:48.457251 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.457723 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:48.954702 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:48.954730 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:48.954740 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.954746 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.957071 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:48.957100 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:48.957108 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.957113 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.957116 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:48.957119 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:48.957122 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.957125 1488539 round_trippers.go:580]     Audit-Id: 2e34b4b7-8ddb-4406-9782-e438e388f6ed
	I0916 11:10:48.957269 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.454474 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:49.454516 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:49.454531 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.454537 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.456787 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.456813 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:49.456823 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:49.456829 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.456832 1488539 round_trippers.go:580]     Audit-Id: dd52f4d1-9cae-4805-9d9e-b68fd02d3b2d
	I0916 11:10:49.456836 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.456842 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.456846 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:49.457206 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.954226 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:49.954251 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:49.954261 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.954265 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.956440 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.956470 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:49.956480 1488539 round_trippers.go:580]     Audit-Id: 13f543cf-3ea2-45d4-9ff9-2f423bd577b4
	I0916 11:10:49.956483 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.956486 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.956489 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:49.956492 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:49.956496 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.956632 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.957080 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:50.453858 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:50.453885 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:50.453895 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.453901 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.456102 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:50.456121 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:50.456129 1488539 round_trippers.go:580]     Audit-Id: 1613bed7-45ae-4b2e-80b6-49623fb446c4
	I0916 11:10:50.456133 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.456138 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.456141 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:50.456144 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:50.456147 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.456303 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:50.954020 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:50.954046 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:50.954057 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.954061 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.956421 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:50.956446 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:50.956454 1488539 round_trippers.go:580]     Audit-Id: c888938e-098c-4019-8536-a23040fac883
	I0916 11:10:50.956459 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.956462 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.956465 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:50.956468 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:50.956471 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.956639 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:51.454579 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:51.454605 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:51.454615 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.454622 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.456912 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.456937 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:51.456946 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:51.456950 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:51.456953 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.456956 1488539 round_trippers.go:580]     Audit-Id: d729565a-fd30-43b1-98d5-7a3ce301235c
	I0916 11:10:51.456959 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.456962 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.457347 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:51.953844 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:51.953872 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:51.953882 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.953889 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.956111 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.956136 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:51.956144 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.956149 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:51.956153 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:51.956155 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.956159 1488539 round_trippers.go:580]     Audit-Id: f6cbd2ee-a536-4ec6-8aa0-56e2c8ff9691
	I0916 11:10:51.956161 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.956351 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:52.454676 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:52.454703 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:52.454712 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.454717 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.457010 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.457030 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:52.457042 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.457049 1488539 round_trippers.go:580]     Audit-Id: 4fac3f61-0e0e-4406-9ad7-d6ccf9194254
	I0916 11:10:52.457053 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.457055 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.457058 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:52.457060 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:52.457188 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:52.457589 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:52.953876 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:52.953905 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:52.953915 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.953919 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.956394 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.956537 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:52.956552 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.956572 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.956578 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:52.956604 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:52.956610 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.956613 1488539 round_trippers.go:580]     Audit-Id: c26661a1-c6b9-4818-81a0-4c5d52474636
	I0916 11:10:52.956763 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:53.453998 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:53.454025 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:53.454035 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.454040 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.456574 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.456701 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:53.456716 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.456721 1488539 round_trippers.go:580]     Audit-Id: a810cf31-6f88-4d2f-81e5-8ab5ce633b65
	I0916 11:10:53.456724 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.456727 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.456730 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:53.456733 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:53.456863 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:53.954319 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:53.954347 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:53.954357 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.954362 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.956743 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.956770 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:53.956778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:53.956785 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:53.956789 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.956792 1488539 round_trippers.go:580]     Audit-Id: d7bd5034-2c62-4432-9797-7f8285d043d4
	I0916 11:10:53.956795 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.956798 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.956905 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.453890 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:54.453912 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:54.453922 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.453927 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.456334 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.456363 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:54.456372 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.456377 1488539 round_trippers.go:580]     Audit-Id: 80a28004-a8ab-4d15-8917-a62bb08e69f1
	I0916 11:10:54.456382 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.456385 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.456389 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:54.456392 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:54.456515 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.954856 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:54.954881 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:54.954890 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.954894 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.957257 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.957280 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:54.957289 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:54.957293 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:54.957296 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.957299 1488539 round_trippers.go:580]     Audit-Id: 4fab01a1-adb6-4e8c-a7b8-f550304ef242
	I0916 11:10:54.957302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.957306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.957420 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.957815 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:55.454642 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:55.454669 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:55.454679 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.454684 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.456953 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.456980 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:55.456989 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:55.456992 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:55.456996 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.456999 1488539 round_trippers.go:580]     Audit-Id: 7c43051f-da85-4474-8ee2-6a8170836e9e
	I0916 11:10:55.457002 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.457010 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.457473 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:55.954752 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:55.954782 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:55.954796 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.954806 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.957201 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.957228 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:55.957237 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.957242 1488539 round_trippers.go:580]     Audit-Id: fe1ee733-8810-4883-92d8-2f87e4d9d5d2
	I0916 11:10:55.957245 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.957248 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.957251 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:55.957253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:55.957538 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:56.454679 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.454712 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.454723 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.454728 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.457209 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.457242 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.457258 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.457262 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.457265 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.457269 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.457272 1488539 round_trippers.go:580]     Audit-Id: 7bfffaf3-1ed4-4448-a1cc-9a220e861d45
	I0916 11:10:56.457275 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.457626 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:56.954474 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.954497 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.954507 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.954512 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.956731 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.956758 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.956766 1488539 round_trippers.go:580]     Audit-Id: 8cfa60ec-9e6f-4c32-a60c-b21ae4a76951
	I0916 11:10:56.956770 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.956773 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.956776 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.956779 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.956783 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.956940 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:56.957345 1488539 node_ready.go:49] node "multinode-654612" has status "Ready":"True"
	I0916 11:10:56.957366 1488539 node_ready.go:38] duration metric: took 41.003724975s for node "multinode-654612" to be "Ready" ...
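
The 41-second wait just concluded is a plain readiness poll: fetch the Node every 500ms and test its Ready condition until it reports True. A minimal client-go sketch of that pattern, offered as an illustration rather than minikube's actual node_ready.go (the node name, 500ms interval, and 6m0s budget are read off the log; the kubeconfig path is an assumption):

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// isNodeReady reports whether the node's Ready condition is True -- the
	// check behind the node "multinode-654612" has status "Ready":"False"
	// lines above.
	func isNodeReady(node *corev1.Node) bool {
		for _, c := range node.Status.Conditions {
			if c.Type == corev1.NodeReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)

		start := time.Now()
		// Poll every 500ms (the cadence visible in the timestamps above)
		// until the node reports Ready or the timeout expires.
		err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				node, err := client.CoreV1().Nodes().Get(ctx, "multinode-654612", metav1.GetOptions{})
				if err != nil {
					return false, nil // treat transient API errors as "not ready yet"
				}
				return isNodeReady(node), nil
			})
		if err != nil {
			panic(err)
		}
		fmt.Printf("node became Ready after %s\n", time.Since(start))
	}
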
	I0916 11:10:56.957376 1488539 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:10:56.957463 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:10:56.957476 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.957485 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.957493 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.960544 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:56.960572 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.960581 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.960585 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.960588 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.960591 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.960595 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.960597 1488539 round_trippers.go:580]     Audit-Id: f4def611-ea75-4386-867f-eb8fd8d4ad18
	I0916 11:10:56.961051 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"407"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59368 chars]
	I0916 11:10:56.965450 1488539 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
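
The per-pod wait that starts here has the same shape: fetch the pod on an interval and test its PodReady condition. A short sketch of that pattern, again as an illustration rather than minikube's pod_ready.go (the pod name, namespace, and 6m0s budget come from the log; the 500ms interval is inferred from the request timestamps):

	package readiness

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// isPodReady mirrors the check logged above: a pod counts as "Ready" once
	// its PodReady condition reports True.
	func isPodReady(pod *corev1.Pod) bool {
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}

	// waitForSystemPod polls one kube-system pod (for example
	// "coredns-7c65d6cfc9-szvv9") until it is Ready or the budget runs out.
	func waitForSystemPod(ctx context.Context, client kubernetes.Interface, name string) error {
		return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				pod, err := client.CoreV1().Pods("kube-system").Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil // keep polling through transient errors
				}
				return isPodReady(pod), nil
			})
	}
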
	I0916 11:10:56.965561 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:56.965575 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.965584 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.965587 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.967860 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.967884 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.967892 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.967897 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.967906 1488539 round_trippers.go:580]     Audit-Id: fa6a0dcc-8b9d-4fa6-899c-26687234b2f6
	I0916 11:10:56.967909 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.967912 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.967915 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.968203 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:56.968801 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.968819 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.968828 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.968833 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.970883 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.970904 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.970912 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.970918 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.970922 1488539 round_trippers.go:580]     Audit-Id: e2d6e67e-8281-4a02-bf21-cb8a6e2b3954
	I0916 11:10:56.970926 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.970930 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.970933 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.971098 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:57.465754 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:57.465794 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.465807 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.465818 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.468444 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.468513 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.468552 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.468580 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.468599 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.468629 1488539 round_trippers.go:580]     Audit-Id: e12e5efd-461f-4f8a-a1b9-8cd6c3998411
	I0916 11:10:57.468650 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.468667 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.468876 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:57.469500 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:57.469523 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.469531 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.469538 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.471748 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.471766 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.471774 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.471778 1488539 round_trippers.go:580]     Audit-Id: 8dc315b6-dff0-4b0d-8c90-272dd2af6883
	I0916 11:10:57.471783 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.471786 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.471789 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.471792 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.471916 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:57.965716 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:57.965740 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.965749 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.965754 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.968099 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.968121 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.968129 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.968136 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.968139 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.968143 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.968146 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.968149 1488539 round_trippers.go:580]     Audit-Id: 1f84b90e-213a-407b-ad42-9dce9531bc91
	I0916 11:10:57.968337 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:57.968937 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:57.968949 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.968957 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.968963 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.971088 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.971108 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.971116 1488539 round_trippers.go:580]     Audit-Id: 8b432d10-c050-4e57-a406-36d293743e90
	I0916 11:10:57.971119 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.971122 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.971124 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.971127 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.971130 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.971245 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.466374 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:58.466400 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.466410 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.466415 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.468785 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.468809 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.468818 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.468824 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.468827 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.468829 1488539 round_trippers.go:580]     Audit-Id: 8750444e-d4db-4751-8176-465a492dd88b
	I0916 11:10:58.468832 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.468835 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.468968 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:58.469526 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:58.469542 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.469550 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.469555 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.471429 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:58.471456 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.471476 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.471481 1488539 round_trippers.go:580]     Audit-Id: 2abf9428-5bb0-462b-9537-0c955075176c
	I0916 11:10:58.471485 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.471489 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.471492 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.471495 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.471638 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.966357 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:58.966385 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.966397 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.966401 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.969620 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:58.969716 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.969738 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.969756 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.969848 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.969874 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.969878 1488539 round_trippers.go:580]     Audit-Id: 653cd0fd-26dc-42f8-8364-b04d53e0eee5
	I0916 11:10:58.969880 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.970015 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:58.970598 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:58.970618 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.970627 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.970632 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.972910 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.973001 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.973027 1488539 round_trippers.go:580]     Audit-Id: 68b2e1f7-43ad-4d2a-a07b-7787472b9701
	I0916 11:10:58.973058 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.973081 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.973100 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.973117 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.973144 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.973325 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.973761 1488539 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:59.465645 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:59.465669 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.465679 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.465683 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.468326 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.468469 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.468511 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.468529 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.468548 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.468556 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.468559 1488539 round_trippers.go:580]     Audit-Id: ea09de90-9c25-4ca3-8a98-2e908ac79e36
	I0916 11:10:59.468562 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.468698 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:10:59.469284 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.469305 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.469314 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.469319 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.471523 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.471542 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.471550 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.471556 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.471559 1488539 round_trippers.go:580]     Audit-Id: 1f01af88-f328-4fa2-8bfb-bdf3273b2bf9
	I0916 11:10:59.471562 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.471565 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.471567 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.471729 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.472137 1488539 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.472161 1488539 pod_ready.go:82] duration metric: took 2.506681902s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.472173 1488539 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.472244 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:10:59.472254 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.472262 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.472267 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.474540 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.474560 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.474570 1488539 round_trippers.go:580]     Audit-Id: 393fab81-d354-4cf5-95dc-a7a82cf6ad0d
	I0916 11:10:59.474575 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.474579 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.474582 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.474587 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.474594 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.474856 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"388","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6435 chars]
	I0916 11:10:59.475391 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.475409 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.475419 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.475425 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.477595 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.477617 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.477625 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.477629 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.477633 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.477636 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.477638 1488539 round_trippers.go:580]     Audit-Id: ceac1902-fb40-4be7-985c-6cd16ee0e8f8
	I0916 11:10:59.477641 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.478015 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.478411 1488539 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.478431 1488539 pod_ready.go:82] duration metric: took 6.246396ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.478446 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.478520 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:10:59.478531 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.478539 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.478544 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.480713 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.480734 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.480742 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.480746 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.480749 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.480753 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.480756 1488539 round_trippers.go:580]     Audit-Id: 71e2ae1e-0063-4af3-8b08-9be39ac6fe3b
	I0916 11:10:59.480759 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.480953 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"386","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8513 chars]
	I0916 11:10:59.481497 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.481512 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.481520 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.481525 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.483572 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.483588 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.483596 1488539 round_trippers.go:580]     Audit-Id: a2b20a94-21c0-49fb-9d48-a1312fafcee4
	I0916 11:10:59.483601 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.483604 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.483607 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.483611 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.483614 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.483888 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.484310 1488539 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.484330 1488539 pod_ready.go:82] duration metric: took 5.871717ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.484342 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.484411 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:10:59.484420 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.484428 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.484434 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.486536 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.486562 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.486570 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.486575 1488539 round_trippers.go:580]     Audit-Id: 6f2a8475-ad58-4a32-b1bc-c866054c08fd
	I0916 11:10:59.486578 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.486582 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.486586 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.486589 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.486923 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"372","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8088 chars]
	I0916 11:10:59.487489 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.487506 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.487515 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.487519 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.489478 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:59.489499 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.489506 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.489512 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.489516 1488539 round_trippers.go:580]     Audit-Id: fa56402c-74cc-4e98-8dbb-b40646ce4dd6
	I0916 11:10:59.489519 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.489524 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.489528 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.489763 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.490144 1488539 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.490163 1488539 pod_ready.go:82] duration metric: took 5.812116ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.490178 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.490239 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:10:59.490248 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.490256 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.490259 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.492302 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.492357 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.492380 1488539 round_trippers.go:580]     Audit-Id: 64686014-8698-440c-80c8-024c917ea91e
	I0916 11:10:59.492395 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.492400 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.492405 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.492408 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.492412 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.492651 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"381","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:10:59.493215 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.493235 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.493244 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.493250 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.498681 1488539 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:10:59.498707 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.498721 1488539 round_trippers.go:580]     Audit-Id: 7c8e2471-3933-4b20-ae44-f62ca8509a31
	I0916 11:10:59.498726 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.498730 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.498733 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.498741 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.498747 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.498843 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.499294 1488539 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.499311 1488539 pod_ready.go:82] duration metric: took 9.125324ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.499323 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.665710 1488539 request.go:632] Waited for 166.301049ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:10:59.665806 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:10:59.665813 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.665821 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.665826 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.668420 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.668495 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.668515 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.668522 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.668530 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.668536 1488539 round_trippers.go:580]     Audit-Id: a8306d45-5dd2-43f6-bcaf-f0d96146eaa9
	I0916 11:10:59.668539 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.668542 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.668707 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"380","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4970 chars]
	I0916 11:10:59.866099 1488539 request.go:632] Waited for 196.94763ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.866222 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.866234 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.866244 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.866254 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.868657 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.868742 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.868759 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.868764 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.868789 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.868794 1488539 round_trippers.go:580]     Audit-Id: df89a71d-3b08-476c-82fc-39a1997f4052
	I0916 11:10:59.868797 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.868814 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.868934 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.869370 1488539 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.869392 1488539 pod_ready.go:82] duration metric: took 370.055117ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.869405 1488539 pod_ready.go:39] duration metric: took 2.912012681s for extra waiting for all system-critical pods and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
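
The ~500ms-spaced GET pairs above are the pod_ready loop: fetch the pod, test its Ready condition, re-fetch the node, sleep, repeat until the 6m0s budget expires. A minimal sketch of that loop with client-go (a hypothetical standalone program, not minikube's actual pod_ready code; assumes a kubeconfig at the default path):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True -- the same
// test behind the pod_ready.go:93/:103 lines in the log above.
func podReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Poll every 500ms, matching the request spacing visible above, within
	// the 6m0s budget the harness logs ("waiting up to 6m0s for pod ...").
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := client.CoreV1().Pods("kube-system").Get(ctx, "coredns-7c65d6cfc9-szvv9", metav1.GetOptions{})
			if err != nil {
				return false, nil // treat errors as "not ready yet" and keep polling
			}
			return podReady(pod), nil
		})
	fmt.Println("pod ready:", err == nil)
}
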
	I0916 11:10:59.869424 1488539 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:10:59.869494 1488539 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:10:59.880592 1488539 command_runner.go:130] > 1383
	I0916 11:10:59.882010 1488539 api_server.go:72] duration metric: took 44.948124134s to wait for apiserver process to appear ...
	I0916 11:10:59.882033 1488539 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:10:59.882054 1488539 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0916 11:10:59.891560 1488539 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
	I0916 11:10:59.891656 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/version
	I0916 11:10:59.891664 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.891673 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.891678 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.892639 1488539 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:10:59.892659 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.892668 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.892756 1488539 round_trippers.go:580]     Audit-Id: 07ce97be-5d9b-43b2-9c1c-426c9b9cf22e
	I0916 11:10:59.892767 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.892775 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.892778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.892781 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.892784 1488539 round_trippers.go:580]     Content-Length: 263
	I0916 11:10:59.892800 1488539 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:10:59.892886 1488539 api_server.go:141] control plane version: v1.31.1
	I0916 11:10:59.892908 1488539 api_server.go:131] duration metric: took 10.868387ms to wait for apiserver health ...
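
Once the pods are Ready, the wait turns to the apiserver itself: a /healthz probe that must return the literal body "ok", then a /version read that yields the JSON object printed above. A sketch of both calls under the same kubeconfig assumption (/healthz is unversioned, so the typed clientset is bypassed for it):

package main

import (
	"context"
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// GET /healthz through the discovery REST client; a healthy control
	// plane answers 200 with the plain body "ok", as logged above.
	body, err := client.Discovery().RESTClient().
		Get().AbsPath("/healthz").Do(context.Background()).Raw()
	fmt.Printf("healthz: %s (err=%v)\n", body, err)

	// ServerVersion performs the GET /version shown above and decodes the
	// same fields (gitVersion "v1.31.1", platform "linux/arm64", ...).
	info, err := client.Discovery().ServerVersion()
	if err != nil {
		panic(err)
	}
	fmt.Println("control plane version:", info.GitVersion)
}
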
	I0916 11:10:59.892916 1488539 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:11:00.066576 1488539 request.go:632] Waited for 173.570698ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.066702 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.066729 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.066754 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.066778 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.089467 1488539 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 11:11:00.089564 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.089591 1488539 round_trippers.go:580]     Audit-Id: a5a10f7a-2d53-49c8-b97a-380f63cd08c9
	I0916 11:11:00.089612 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.089654 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.089673 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.089693 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.089715 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.100667 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59444 chars]
	I0916 11:11:00.120364 1488539 system_pods.go:59] 8 kube-system pods found
	I0916 11:11:00.120411 1488539 system_pods.go:61] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:11:00.120420 1488539 system_pods.go:61] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:11:00.120425 1488539 system_pods.go:61] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:11:00.120431 1488539 system_pods.go:61] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:11:00.120436 1488539 system_pods.go:61] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:11:00.120440 1488539 system_pods.go:61] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:11:00.120444 1488539 system_pods.go:61] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:11:00.120449 1488539 system_pods.go:61] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:11:00.120456 1488539 system_pods.go:74] duration metric: took 227.532665ms to wait for pod list to return data ...
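
The recurring "Waited for ... due to client-side throttling, not priority and fairness" lines come from client-go's own token-bucket rate limiter delaying requests before they leave the process; the server-side APF headers (X-Kubernetes-Pf-*) in the responses are unrelated to that wait. A sketch of where that limiter is configured (5 and 10 are client-go's defaults; the values this run actually used are not shown in the log):

package main

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cfg.QPS = 5    // steady-state requests/sec before the client starts delaying
	cfg.Burst = 10 // extra headroom for short bursts, like the poll loops above
	// Requests issued through this clientset may now block briefly, emitting
	// exactly the request.go "Waited for ..." messages seen in this log.
	client := kubernetes.NewForConfigOrDie(cfg)
	_ = client
}
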
	I0916 11:11:00.120465 1488539 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:11:00.265711 1488539 request.go:632] Waited for 145.145617ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:11:00.265812 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:11:00.265820 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.265829 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.265840 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.270264 1488539 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:00.270297 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.270314 1488539 round_trippers.go:580]     Audit-Id: 31d397e2-ac96-42ab-9672-10bf68d9d264
	I0916 11:11:00.270319 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.270322 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.270326 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.270329 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.270333 1488539 round_trippers.go:580]     Content-Length: 261
	I0916 11:11:00.270336 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.270555 1488539 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"8b0a4fd5-1ca6-4da1-beae-b1e2017b49fd","resourceVersion":"297","creationTimestamp":"2024-09-16T11:10:14Z"}}]}
	I0916 11:11:00.270811 1488539 default_sa.go:45] found service account: "default"
	I0916 11:11:00.270839 1488539 default_sa.go:55] duration metric: took 150.366331ms for default service account to be created ...
	I0916 11:11:00.270861 1488539 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:11:00.466538 1488539 request.go:632] Waited for 195.530187ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.466615 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.466623 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.466633 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.466639 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.470039 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:00.470073 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.470083 1488539 round_trippers.go:580]     Audit-Id: 8d22c61f-f01a-448b-a62c-8b72e08ed17d
	I0916 11:11:00.470087 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.470091 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.470095 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.470098 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.470101 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.470957 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59444 chars]
	I0916 11:11:00.473681 1488539 system_pods.go:86] 8 kube-system pods found
	I0916 11:11:00.473722 1488539 system_pods.go:89] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:11:00.473730 1488539 system_pods.go:89] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:11:00.473735 1488539 system_pods.go:89] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:11:00.473739 1488539 system_pods.go:89] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:11:00.473744 1488539 system_pods.go:89] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:11:00.473750 1488539 system_pods.go:89] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:11:00.473754 1488539 system_pods.go:89] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:11:00.473758 1488539 system_pods.go:89] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:11:00.473765 1488539 system_pods.go:126] duration metric: took 202.894512ms to wait for k8s-apps to be running ...
	I0916 11:11:00.473777 1488539 system_svc.go:44] waiting for kubelet service to be running ...
	I0916 11:11:00.473840 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:00.487269 1488539 system_svc.go:56] duration metric: took 13.481666ms for WaitForService to wait for kubelet
	I0916 11:11:00.487299 1488539 kubeadm.go:582] duration metric: took 45.553416606s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
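
The kubelet check above shells out over SSH to systemctl and relies only on the exit status. A hedged sketch of the same probe as a local helper (serviceActive is a hypothetical name, not minikube's ssh_runner):

package main

import (
	"fmt"
	"os/exec"
)

// serviceActive mirrors `systemctl is-active --quiet <unit>`: with --quiet
// nothing is printed, so the exit code alone says whether the unit is active.
func serviceActive(unit string) bool {
	return exec.Command("systemctl", "is-active", "--quiet", unit).Run() == nil
}

func main() {
	fmt.Println("kubelet running:", serviceActive("kubelet"))
}
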
	I0916 11:11:00.487318 1488539 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:00.665713 1488539 request.go:632] Waited for 178.262718ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:00.665806 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:00.665818 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.665848 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.665864 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.668567 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:00.668645 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.668670 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.668723 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.668743 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.668762 1488539 round_trippers.go:580]     Audit-Id: 57b02d9f-1bb5-4431-933d-dbb6ed1664ae
	I0916 11:11:00.668778 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.668782 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.668969 1488539 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 6082 chars]
	I0916 11:11:00.669474 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:00.669508 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:00.669522 1488539 node_conditions.go:105] duration metric: took 182.198499ms to run NodePressure ...
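	(Editor's aside: the NodePressure verification above reduces to one NodeList call and a read of each node's capacity fields. For readers reproducing the check by hand, a minimal client-go sketch of the same query follows; it is illustrative only, not minikube's implementation, and it reuses the kubeconfig path this run logs later.)

	package main

	import (
		"context"
		"fmt"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		// Build a client from the kubeconfig this test run writes.
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		// GET /api/v1/nodes, the same request logged by round_trippers above.
		nodes, err := cs.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			// e.g. "multinode-654612 cpu=2 ephemeral-storage=203034800Ki"
			fmt.Printf("%s cpu=%s ephemeral-storage=%s\n", n.Name,
				n.Status.Capacity.Cpu().String(),
				n.Status.Capacity.StorageEphemeral().String())
		}
	}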
	I0916 11:11:00.669536 1488539 start.go:241] waiting for startup goroutines ...
	I0916 11:11:00.669543 1488539 start.go:246] waiting for cluster config update ...
	I0916 11:11:00.669555 1488539 start.go:255] writing updated cluster config ...
	I0916 11:11:00.672794 1488539 out.go:201] 
	I0916 11:11:00.675785 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:00.675885 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:11:00.678947 1488539 out.go:177] * Starting "multinode-654612-m02" worker node in "multinode-654612" cluster
	I0916 11:11:00.682259 1488539 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:11:00.685034 1488539 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:11:00.687880 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:11:00.687920 1488539 cache.go:56] Caching tarball of preloaded images
	I0916 11:11:00.687977 1488539 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:11:00.688063 1488539 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:11:00.688077 1488539 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:11:00.688179 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:11:00.706652 1488539 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:11:00.706683 1488539 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:11:00.706771 1488539 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:11:00.706796 1488539 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:11:00.706802 1488539 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:11:00.706811 1488539 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:11:00.706821 1488539 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:11:00.708139 1488539 image.go:273] response: 
	I0916 11:11:00.853461 1488539 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:11:00.853503 1488539 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:11:00.853538 1488539 start.go:360] acquireMachinesLock for multinode-654612-m02: {Name:mk70904bbc860a548c4a9726b7d64e227f1f9cac Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:11:00.853676 1488539 start.go:364] duration metric: took 116.969µs to acquireMachinesLock for "multinode-654612-m02"
	I0916 11:11:00.853712 1488539 start.go:93] Provisioning new machine with config: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:00.853791 1488539 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 11:11:00.857131 1488539 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:11:00.857265 1488539 start.go:159] libmachine.API.Create for "multinode-654612" (driver="docker")
	I0916 11:11:00.857297 1488539 client.go:168] LocalClient.Create starting
	I0916 11:11:00.857381 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:11:00.857418 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:11:00.857437 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:11:00.857492 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:11:00.857514 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:11:00.857527 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:11:00.857800 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:11:00.874572 1488539 network_create.go:77] Found existing network {name:multinode-654612 subnet:0x4001935740 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 67 1] mtu:1500}
	I0916 11:11:00.874624 1488539 kic.go:121] calculated static IP "192.168.67.3" for the "multinode-654612-m02" container
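	(Editor's aside: the "calculated static IP" step hands the new machine the next address after the existing nodes inside the cluster network's /24: gateway .1, control plane .2, second machine .3. minikube's kic package derives this from the inspected network; the stdlib-only sketch below just shows the arithmetic, with nthHostIP being a hypothetical helper name.)

	package main

	import (
		"fmt"
		"net"
	)

	// nthHostIP returns the nth address after the subnet base,
	// e.g. nthHostIP("192.168.67.0/24", 3) == 192.168.67.3.
	func nthHostIP(cidr string, n byte) (net.IP, error) {
		_, ipnet, err := net.ParseCIDR(cidr)
		if err != nil {
			return nil, err
		}
		ip := ipnet.IP.To4()
		if ip == nil {
			return nil, fmt.Errorf("not an IPv4 subnet: %s", cidr)
		}
		out := make(net.IP, len(ip))
		copy(out, ip)
		out[3] += n // safe for small n inside a /24
		return out, nil
	}

	func main() {
		// Gateway is .1 and the control plane holds .2, so m02 gets .3.
		ip, err := nthHostIP("192.168.67.0/24", 3)
		if err != nil {
			panic(err)
		}
		fmt.Println(ip) // 192.168.67.3
	}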
	I0916 11:11:00.874702 1488539 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:11:00.894126 1488539 cli_runner.go:164] Run: docker volume create multinode-654612-m02 --label name.minikube.sigs.k8s.io=multinode-654612-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:11:00.912587 1488539 oci.go:103] Successfully created a docker volume multinode-654612-m02
	I0916 11:11:00.912769 1488539 cli_runner.go:164] Run: docker run --rm --name multinode-654612-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612-m02 --entrypoint /usr/bin/test -v multinode-654612-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:11:01.512883 1488539 oci.go:107] Successfully prepared a docker volume multinode-654612-m02
	I0916 11:11:01.512928 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:11:01.512950 1488539 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:11:01.513034 1488539 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:11:05.694032 1488539 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.180954104s)
	I0916 11:11:05.694068 1488539 kic.go:203] duration metric: took 4.181114322s to extract preloaded images to volume ...
	W0916 11:11:05.694209 1488539 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:11:05.694314 1488539 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:11:05.747729 1488539 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-654612-m02 --name multinode-654612-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-654612-m02 --network multinode-654612 --ip 192.168.67.3 --volume multinode-654612-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:11:06.094697 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Running}}
	I0916 11:11:06.124132 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:06.145177 1488539 cli_runner.go:164] Run: docker exec multinode-654612-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:11:06.195786 1488539 oci.go:144] the created container "multinode-654612-m02" has a running status.
	I0916 11:11:06.195813 1488539 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa...
	I0916 11:11:06.924417 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:11:06.924542 1488539 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:11:06.952721 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:06.978671 1488539 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:11:06.978691 1488539 kic_runner.go:114] Args: [docker exec --privileged multinode-654612-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
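	(Editor's aside: creating the kic SSH key amounts to generating an RSA keypair, writing the private half under .minikube/machines/<name>/id_rsa, and installing the public half as /home/docker/.ssh/authorized_keys in the container, as the three log lines above show. A self-contained sketch of the key-generation half follows; the 2048-bit size and the golang.org/x/crypto/ssh encoder are assumptions, and minikube's exact parameters may differ.)

	package main

	import (
		"crypto/rand"
		"crypto/rsa"
		"crypto/x509"
		"encoding/pem"
		"os"

		"golang.org/x/crypto/ssh"
	)

	func main() {
		key, err := rsa.GenerateKey(rand.Reader, 2048)
		if err != nil {
			panic(err)
		}
		// PEM-encode the private key (what lands in machines/<name>/id_rsa).
		privPEM := pem.EncodeToMemory(&pem.Block{
			Type:  "RSA PRIVATE KEY",
			Bytes: x509.MarshalPKCS1PrivateKey(key),
		})
		if err := os.WriteFile("id_rsa", privPEM, 0o600); err != nil {
			panic(err)
		}
		// authorized_keys format for the public half (id_rsa.pub).
		pub, err := ssh.NewPublicKey(&key.PublicKey)
		if err != nil {
			panic(err)
		}
		if err := os.WriteFile("id_rsa.pub", ssh.MarshalAuthorizedKey(pub), 0o644); err != nil {
			panic(err)
		}
	}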
	I0916 11:11:07.046268 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:07.067328 1488539 machine.go:93] provisionDockerMachine start ...
	I0916 11:11:07.067438 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.118996 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.119278 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.119288 1488539 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:11:07.278066 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:11:07.278093 1488539 ubuntu.go:169] provisioning hostname "multinode-654612-m02"
	I0916 11:11:07.278165 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.308033 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.308345 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.308366 1488539 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612-m02 && echo "multinode-654612-m02" | sudo tee /etc/hostname
	I0916 11:11:07.463803 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:11:07.463884 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.484002 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.484250 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.484268 1488539 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:11:07.620968 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
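	(Editor's aside: the SSH snippet above is the usual idempotent hosts-file fixup: rewrite the 127.0.1.1 entry if one exists, append it otherwise. The same logic in stdlib Go, purely for reference; minikube performs it remotely with sed, not like this.)

	package main

	import (
		"fmt"
		"os"
		"regexp"
	)

	func ensureHostname(path, name string) error {
		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		line := "127.0.1.1 " + name
		re := regexp.MustCompile(`(?m)^127\.0\.1\.1\s.*$`)
		if re.Match(data) {
			data = re.ReplaceAll(data, []byte(line)) // rewrite existing entry
		} else {
			data = append(data, []byte(line+"\n")...) // or append a new one
		}
		return os.WriteFile(path, data, 0o644)
	}

	func main() {
		if err := ensureHostname("/etc/hosts", "multinode-654612-m02"); err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
	}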
	I0916 11:11:07.620998 1488539 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:11:07.621015 1488539 ubuntu.go:177] setting up certificates
	I0916 11:11:07.621027 1488539 provision.go:84] configureAuth start
	I0916 11:11:07.621089 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:07.638084 1488539 provision.go:143] copyHostCerts
	I0916 11:11:07.638149 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:11:07.638187 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:11:07.638200 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:11:07.638279 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:11:07.638383 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:11:07.638404 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:11:07.638408 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:11:07.638440 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:11:07.638496 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:11:07.638516 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:11:07.638522 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:11:07.638552 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:11:07.638600 1488539 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612-m02 san=[127.0.0.1 192.168.67.3 localhost minikube multinode-654612-m02]
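	(Editor's aside: the server-cert generation above is plain x509 issuance covering the logged SANs, signed by the minikube CA. A compressed crypto/x509 sketch follows; it self-signs for brevity, whereas the real flow signs with ca.pem/ca-key.pem, and the key size and usages here are assumptions.)

	package main

	import (
		"crypto/rand"
		"crypto/rsa"
		"crypto/x509"
		"crypto/x509/pkix"
		"encoding/pem"
		"math/big"
		"net"
		"os"
		"time"
	)

	func main() {
		key, err := rsa.GenerateKey(rand.Reader, 2048)
		if err != nil {
			panic(err)
		}
		tmpl := &x509.Certificate{
			SerialNumber: big.NewInt(1),
			Subject:      pkix.Name{Organization: []string{"jenkins.multinode-654612-m02"}},
			NotBefore:    time.Now(),
			NotAfter:     time.Now().Add(26280 * time.Hour), // matches CertExpiration above
			KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
			ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
			// The SANs logged above: every IP and hostname the cert must cover.
			IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.67.3")},
			DNSNames:    []string{"localhost", "minikube", "multinode-654612-m02"},
		}
		der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
		if err != nil {
			panic(err)
		}
		out, _ := os.Create("server.pem")
		defer out.Close()
		pem.Encode(out, &pem.Block{Type: "CERTIFICATE", Bytes: der})
	}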
	I0916 11:11:08.737757 1488539 provision.go:177] copyRemoteCerts
	I0916 11:11:08.737832 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:11:08.737876 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:08.757155 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:08.854353 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:11:08.854422 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:11:08.880881 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:11:08.880949 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:11:08.908329 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:11:08.908449 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:11:08.934795 1488539 provision.go:87] duration metric: took 1.313752535s to configureAuth
	I0916 11:11:08.934825 1488539 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:11:08.935059 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:08.935182 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:08.952259 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:08.952507 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:08.952522 1488539 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:11:09.190216 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:11:09.190244 1488539 machine.go:96] duration metric: took 2.122894048s to provisionDockerMachine
	I0916 11:11:09.190255 1488539 client.go:171] duration metric: took 8.332949187s to LocalClient.Create
	I0916 11:11:09.190268 1488539 start.go:167] duration metric: took 8.333013235s to libmachine.API.Create "multinode-654612"
	I0916 11:11:09.190276 1488539 start.go:293] postStartSetup for "multinode-654612-m02" (driver="docker")
	I0916 11:11:09.190287 1488539 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:11:09.190352 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:11:09.190400 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.207541 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.307405 1488539 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:11:09.311100 1488539 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:11:09.311132 1488539 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:11:09.311140 1488539 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:11:09.311168 1488539 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:11:09.311179 1488539 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:11:09.311183 1488539 command_runner.go:130] > ID=ubuntu
	I0916 11:11:09.311187 1488539 command_runner.go:130] > ID_LIKE=debian
	I0916 11:11:09.311191 1488539 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:11:09.311196 1488539 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:11:09.311208 1488539 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:11:09.311216 1488539 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:11:09.311238 1488539 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:11:09.311612 1488539 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:11:09.311671 1488539 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:11:09.311699 1488539 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:11:09.311723 1488539 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:11:09.311755 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:11:09.311831 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:11:09.311935 1488539 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:11:09.311964 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:11:09.312090 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:11:09.322001 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:11:09.349115 1488539 start.go:296] duration metric: took 158.797246ms for postStartSetup
	I0916 11:11:09.349587 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:09.367303 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:11:09.367820 1488539 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:11:09.367875 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.385781 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.481853 1488539 command_runner.go:130] > 12%
	I0916 11:11:09.481954 1488539 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:11:09.486528 1488539 command_runner.go:130] > 171G
	I0916 11:11:09.486931 1488539 start.go:128] duration metric: took 8.633125281s to createHost
	I0916 11:11:09.486964 1488539 start.go:83] releasing machines lock for "multinode-654612-m02", held for 8.633275849s
	I0916 11:11:09.487037 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:09.510532 1488539 out.go:177] * Found network options:
	I0916 11:11:09.513308 1488539 out.go:177]   - NO_PROXY=192.168.67.2
	W0916 11:11:09.515867 1488539 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:11:09.515907 1488539 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:11:09.515979 1488539 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:11:09.516030 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.516075 1488539 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:11:09.516130 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.540288 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.554543 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.788185 1488539 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:11:09.788270 1488539 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:11:09.792278 1488539 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:11:09.792301 1488539 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:11:09.792308 1488539 command_runner.go:130] > Device: b3h/179d	Inode: 1570512     Links: 1
	I0916 11:11:09.792314 1488539 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:09.792320 1488539 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:11:09.792325 1488539 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:11:09.792330 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:11:09.792336 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:11:09.792651 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:11:09.815261 1488539 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:11:09.815338 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:11:09.851663 1488539 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:11:09.851707 1488539 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:11:09.851716 1488539 start.go:495] detecting cgroup driver to use...
	I0916 11:11:09.851747 1488539 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:11:09.851811 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:11:09.870178 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:11:09.882882 1488539 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:11:09.882995 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:11:09.898160 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:11:09.914062 1488539 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:11:10.017548 1488539 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:11:10.130274 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:11:10.130373 1488539 docker.go:233] disabling docker service ...
	I0916 11:11:10.130516 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:11:10.154434 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:11:10.169082 1488539 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:11:10.273837 1488539 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:11:10.273915 1488539 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:11:10.372559 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:11:10.372733 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:11:10.385735 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:11:10.402452 1488539 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:11:10.403842 1488539 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:11:10.403945 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.414785 1488539 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:11:10.414903 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.426239 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.437599 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.448596 1488539 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:11:10.460133 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.471372 1488539 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.489257 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
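	(Editor's aside: the run of sed invocations above rewrites /etc/crio/crio.conf.d/02-crio.conf in place: pause image, cgroup manager, conmon cgroup, and the unprivileged-port sysctl. The Go sketch below performs the two central rewrites on a local file, purely to make the intent of the seds explicit; the real edits happen over SSH with sed.)

	package main

	import (
		"os"
		"regexp"
	)

	func main() {
		const conf = "/etc/crio/crio.conf.d/02-crio.conf"
		data, err := os.ReadFile(conf)
		if err != nil {
			panic(err)
		}
		// pause_image = "registry.k8s.io/pause:3.10", as logged above.
		data = regexp.MustCompile(`(?m)^.*pause_image = .*$`).
			ReplaceAll(data, []byte(`pause_image = "registry.k8s.io/pause:3.10"`))
		// cgroup_manager = "cgroupfs", the driver detected on the host above.
		data = regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`).
			ReplaceAll(data, []byte(`cgroup_manager = "cgroupfs"`))
		if err := os.WriteFile(conf, data, 0o644); err != nil {
			panic(err)
		}
	}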
	I0916 11:11:10.500785 1488539 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:11:10.509177 1488539 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:11:10.510610 1488539 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:11:10.519719 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:10.611398 1488539 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:11:10.730082 1488539 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:11:10.730176 1488539 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:11:10.734378 1488539 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:11:10.734409 1488539 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:11:10.734421 1488539 command_runner.go:130] > Device: bch/188d	Inode: 186         Links: 1
	I0916 11:11:10.734428 1488539 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:10.734433 1488539 command_runner.go:130] > Access: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734441 1488539 command_runner.go:130] > Modify: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734450 1488539 command_runner.go:130] > Change: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734453 1488539 command_runner.go:130] >  Birth: -
	I0916 11:11:10.734684 1488539 start.go:563] Will wait 60s for crictl version
	I0916 11:11:10.734745 1488539 ssh_runner.go:195] Run: which crictl
	I0916 11:11:10.738210 1488539 command_runner.go:130] > /usr/bin/crictl
	I0916 11:11:10.738596 1488539 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:11:10.780606 1488539 command_runner.go:130] > Version:  0.1.0
	I0916 11:11:10.781046 1488539 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:11:10.781215 1488539 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:11:10.781373 1488539 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:11:10.784585 1488539 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:11:10.784719 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:11:10.823377 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:11:10.823400 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:11:10.823409 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:11:10.823414 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:11:10.823423 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:11:10.823428 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:11:10.823432 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:11:10.823438 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:11:10.823442 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:11:10.823451 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:11:10.823458 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:11:10.823462 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:11:10.826040 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:11:10.878676 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:11:10.878701 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:11:10.878710 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:11:10.878715 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:11:10.878722 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:11:10.878726 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:11:10.878730 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:11:10.878734 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:11:10.878740 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:11:10.878748 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:11:10.878755 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:11:10.878765 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:11:10.883253 1488539 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:11:10.885850 1488539 out.go:177]   - env NO_PROXY=192.168.67.2
	I0916 11:11:10.888527 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:11:10.904887 1488539 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:11:10.908885 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:10.919789 1488539 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:11:10.920029 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:10.920300 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:11:10.937108 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:11:10.937482 1488539 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.3
	I0916 11:11:10.937500 1488539 certs.go:194] generating shared ca certs ...
	I0916 11:11:10.937522 1488539 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:11:10.937690 1488539 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:11:10.937751 1488539 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:11:10.937767 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:11:10.937787 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:11:10.937805 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:11:10.937823 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:11:10.937911 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:11:10.937958 1488539 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:11:10.938029 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:11:10.938128 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:11:10.938206 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:11:10.938246 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:11:10.938320 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:11:10.938370 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:11:10.938397 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:10.938409 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:11:10.938472 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:11:10.969739 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:11:10.998757 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:11:11.026459 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:11:11.051456 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:11:11.078334 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:11:11.104702 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:11:11.132426 1488539 ssh_runner.go:195] Run: openssl version
	I0916 11:11:11.137827 1488539 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:11:11.138236 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:11:11.148491 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152754 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152793 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152850 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.159733 1488539 command_runner.go:130] > 3ec20f2e
	I0916 11:11:11.160194 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:11:11.170159 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:11:11.179701 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184008 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184176 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184244 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.191019 1488539 command_runner.go:130] > b5213941
	I0916 11:11:11.191441 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:11:11.201136 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:11:11.210539 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.213875 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.214175 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.214241 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.221459 1488539 command_runner.go:130] > 51391683
	I0916 11:11:11.221604 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
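	(Editor's aside: the openssl/ln pairs above implement the classic CA-directory layout: each PEM under /usr/share/ca-certificates gets a symlink /etc/ssl/certs/<subject-hash>.0 pointing back at it, so OpenSSL can find it by hash. A small Go wrapper around the same two commands, using exactly the openssl flags logged above:)

	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"path/filepath"
		"strings"
	)

	func linkCert(pemPath string) error {
		// `openssl x509 -hash -noout -in <pem>` prints the subject hash, e.g. 51391683.
		out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pemPath).Output()
		if err != nil {
			return err
		}
		hash := strings.TrimSpace(string(out))
		link := filepath.Join("/etc/ssl/certs", hash+".0")
		// Idempotent: drop any stale link before recreating it.
		_ = os.Remove(link)
		return os.Symlink(pemPath, link)
	}

	func main() {
		if err := linkCert("/usr/share/ca-certificates/1383833.pem"); err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
	}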
	I0916 11:11:11.231519 1488539 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:11:11.234972 1488539 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:11.235011 1488539 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:11.235041 1488539 kubeadm.go:934] updating node {m02 192.168.67.3 8443 v1.31.1 crio false true} ...
	I0916 11:11:11.235129 1488539 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:11:11.235201 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:11:11.242965 1488539 command_runner.go:130] > kubeadm
	I0916 11:11:11.242990 1488539 command_runner.go:130] > kubectl
	I0916 11:11:11.242995 1488539 command_runner.go:130] > kubelet
	I0916 11:11:11.243993 1488539 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:11:11.244085 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:11:11.253211 1488539 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (370 bytes)
	I0916 11:11:11.272594 1488539 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:11:11.292079 1488539 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:11:11.295744 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:11.307441 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:11.406960 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:11.422192 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:11:11.422471 1488539 start.go:317] joinCluster: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:11:11.422568 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 11:11:11.422625 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:11:11.447222 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:11:11.613253 1488539 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:11:11.613295 1488539 start.go:343] trying to join worker node "m02" to cluster: &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:11.613333 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=multinode-654612-m02"
	I0916 11:11:11.656532 1488539 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:11:11.666665 1488539 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:11:11.666690 1488539 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:11:11.666696 1488539 command_runner.go:130] > OS: Linux
	I0916 11:11:11.666708 1488539 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:11:11.666714 1488539 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:11:11.666719 1488539 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:11:11.666724 1488539 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:11:11.666729 1488539 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:11:11.666734 1488539 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:11:11.666741 1488539 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:11:11.666746 1488539 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:11:11.666751 1488539 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:11:11.757112 1488539 command_runner.go:130] > [preflight] Reading configuration from the cluster...
	I0916 11:11:11.757182 1488539 command_runner.go:130] > [preflight] FYI: You can look at this config file with 'kubectl -n kube-system get cm kubeadm-config -o yaml'
	I0916 11:11:11.795717 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:11:11.795973 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:11:11.796169 1488539 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:11:11.901509 1488539 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:11:13.407549 1488539 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.504072304s
	I0916 11:11:13.407576 1488539 command_runner.go:130] > [kubelet-start] Waiting for the kubelet to perform the TLS Bootstrap
	I0916 11:11:13.921373 1488539 command_runner.go:130] > This node has joined the cluster:
	I0916 11:11:13.921412 1488539 command_runner.go:130] > * Certificate signing request was sent to apiserver and a response was received.
	I0916 11:11:13.921419 1488539 command_runner.go:130] > * The Kubelet was informed of the new secure connection details.
	I0916 11:11:13.921426 1488539 command_runner.go:130] > Run 'kubectl get nodes' on the control-plane to see this node join the cluster.
	I0916 11:11:13.925918 1488539 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:11:13.925950 1488539 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:11:13.925972 1488539 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=multinode-654612-m02": (2.312624427s)
	I0916 11:11:13.925997 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 11:11:14.029869 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service.
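
The symlink message above is the output of the daemon-reload/enable/start sequence minikube runs over SSH. A local sketch of the same three commands via os/exec, assuming a systemd host and passwordless sudo:

    package main

    import (
        "fmt"
        "os/exec"
    )

    func main() {
        // The same sequence the ssh_runner issues above, run locally.
        steps := [][]string{
            {"systemctl", "daemon-reload"},
            {"systemctl", "enable", "kubelet"},
            {"systemctl", "start", "kubelet"},
        }
        for _, args := range steps {
            if out, err := exec.Command("sudo", args...).CombinedOutput(); err != nil {
                fmt.Printf("%v failed: %v\n%s", args, err, out)
                return
            }
        }
        fmt.Println("kubelet enabled and started")
    }
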
	I0916 11:11:14.120221 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-654612-m02 minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-654612 minikube.k8s.io/primary=false
	I0916 11:11:14.248261 1488539 command_runner.go:130] > node/multinode-654612-m02 labeled
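
The labeling step above shells out to the bundled kubectl with --overwrite. The same effect can be achieved in-process with client-go via a strategic merge patch; this sketch copies two of the label values and the on-node kubeconfig path from the log, and is an illustration rather than minikube's own code:

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/types"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // A strategic merge patch adds or overwrites labels without
        // touching the rest of the node object.
        patch := []byte(`{"metadata":{"labels":{` +
            `"minikube.k8s.io/name":"multinode-654612",` +
            `"minikube.k8s.io/primary":"false"}}}`)
        _, err = cs.CoreV1().Nodes().Patch(context.Background(),
            "multinode-654612-m02", types.StrategicMergePatchType,
            patch, metav1.PatchOptions{})
        if err != nil {
            panic(err)
        }
        fmt.Println("node labeled")
    }
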
	I0916 11:11:14.252726 1488539 start.go:319] duration metric: took 2.830250727s to joinCluster
	I0916 11:11:14.252786 1488539 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:14.253276 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:14.255757 1488539 out.go:177] * Verifying Kubernetes components...
	I0916 11:11:14.258506 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:14.357209 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:14.375013 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:11:14.375454 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
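
The kapi.go dump above shows the client config minikube builds for the control plane: the API server host plus client certificate, key, and CA file paths (the struct prints as rest.sanitizedTLSClientConfig, the log-safe view of rest.TLSClientConfig). A hand-built config with the same fields, as a sketch using the paths from the log:

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func main() {
        profile := "/home/jenkins/minikube-integration/19651-1378450/.minikube"
        cfg := &rest.Config{
            Host: "https://192.168.67.2:8443",
            TLSClientConfig: rest.TLSClientConfig{
                CertFile: profile + "/profiles/multinode-654612/client.crt",
                KeyFile:  profile + "/profiles/multinode-654612/client.key",
                CAFile:   profile + "/ca.crt",
            },
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        node, err := cs.CoreV1().Nodes().Get(context.Background(),
            "multinode-654612-m02", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        fmt.Println(node.Name, len(node.Status.Conditions), "conditions")
    }
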
	I0916 11:11:14.375804 1488539 node_ready.go:35] waiting up to 6m0s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:11:14.375920 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:14.375944 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:14.375965 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.375999 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.378546 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.378570 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:14.378579 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:14.378585 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:14.378591 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.378595 1488539 round_trippers.go:580]     Audit-Id: 6eef9699-3cb4-4b38-8f65-1c0c18ae22b0
	I0916 11:11:14.378598 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.378601 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.378759 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"469","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"
f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}},"f: [truncated 5619 chars]
	I0916 11:11:14.876974 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:14.877001 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:14.877011 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.877016 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.879316 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.879338 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:14.879347 1488539 round_trippers.go:580]     Audit-Id: 1acc090a-2827-44db-bc3f-bc84c4655786
	I0916 11:11:14.879352 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.879355 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.879360 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:14.879364 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:14.879368 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.879570 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:15.376266 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:15.376293 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:15.376303 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.376307 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.379471 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:15.379503 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:15.379511 1488539 round_trippers.go:580]     Audit-Id: 47eb4630-91dd-4529-85cd-9f3e3c025efe
	I0916 11:11:15.379516 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.379520 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.379523 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:15.379527 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:15.379530 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.380873 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:15.876914 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:15.876940 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:15.876950 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.876956 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.879560 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:15.879599 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:15.879608 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.879612 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.879617 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:15.879636 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:15.879643 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.879647 1488539 round_trippers.go:580]     Audit-Id: a0b5b511-eb0f-423b-a821-83b992a05a05
	I0916 11:11:15.879836 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:16.376946 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:16.376966 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:16.376976 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.376980 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.379126 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.379193 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:16.379212 1488539 round_trippers.go:580]     Audit-Id: 6993bcf0-193e-4be6-ab85-876bd53df1ef
	I0916 11:11:16.379231 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.379250 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.379284 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:16.379327 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:16.379344 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.379468 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:16.379859 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:16.876017 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:16.876043 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:16.876050 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.876055 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.878812 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.878935 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:16.878969 1488539 round_trippers.go:580]     Audit-Id: 51e825d9-82d9-4b6d-a74f-9d04bcd0df78
	I0916 11:11:16.878994 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.878999 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.879002 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:16.879005 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:16.879008 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.879183 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:17.376700 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:17.376725 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:17.376735 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.376749 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.379291 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.379313 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:17.379321 1488539 round_trippers.go:580]     Audit-Id: c60528b5-2803-4fbf-b3df-0e00c8218b97
	I0916 11:11:17.379325 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.379329 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.379342 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:17.379347 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:17.379350 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.379656 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:17.876872 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:17.876898 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:17.876909 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.876913 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.879285 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.879310 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:17.879319 1488539 round_trippers.go:580]     Audit-Id: 75b573d1-7f5d-4287-8f70-3ed725b0ca5c
	I0916 11:11:17.879324 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.879329 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.879332 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:17.879337 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:17.879340 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.879724 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.376113 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:18.376140 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:18.376151 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.376155 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.378577 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.378605 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:18.378613 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:18.378617 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.378621 1488539 round_trippers.go:580]     Audit-Id: 7590e0c6-37b6-41e5-960c-b55c4d154b9d
	I0916 11:11:18.378625 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.378628 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.378631 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:18.378934 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.876037 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:18.876059 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:18.876069 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.876074 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.878731 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.878753 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:18.878762 1488539 round_trippers.go:580]     Audit-Id: ff102ec7-0259-49ee-b50a-5a62cb9a308b
	I0916 11:11:18.878768 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.878773 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.878776 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:18.878779 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:18.878781 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.878888 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.879287 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:19.376449 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:19.376472 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:19.376482 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.376487 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.378887 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.378910 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:19.378918 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:19.378924 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:19.378927 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.378930 1488539 round_trippers.go:580]     Audit-Id: 8e56b254-8400-489a-b51b-b4fd1909804c
	I0916 11:11:19.378933 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.378936 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.379037 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:19.876264 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:19.876296 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:19.876306 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.876312 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.878640 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.878669 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:19.878678 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.878684 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:19.878688 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:19.878694 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.878697 1488539 round_trippers.go:580]     Audit-Id: e0bbb52e-690a-489d-b2e4-f7abb263b910
	I0916 11:11:19.878702 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.878833 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.377009 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:20.377037 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:20.377047 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.377052 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.379282 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.379306 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:20.379315 1488539 round_trippers.go:580]     Audit-Id: 968a2336-ea50-4a69-b188-92e287e1e61c
	I0916 11:11:20.379336 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.379349 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.379352 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:20.379356 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:20.379366 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.379758 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.876368 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:20.876397 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:20.876406 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.876410 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.879272 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.879303 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:20.879325 1488539 round_trippers.go:580]     Audit-Id: d945c715-b17d-4d14-b534-eaf5a33c9c49
	I0916 11:11:20.879330 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.879333 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.879336 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:20.879339 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:20.879343 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.879761 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.880158 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:21.376080 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:21.376102 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:21.376113 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.376117 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.378443 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.378466 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:21.378475 1488539 round_trippers.go:580]     Audit-Id: 1cd3838c-afff-4933-8104-ea7949cebaae
	I0916 11:11:21.378479 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.378482 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.378487 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:21.378490 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:21.378492 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.378887 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:21.876570 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:21.876596 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:21.876606 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.876611 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.878907 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.878933 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:21.878942 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:21.878946 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:21.878950 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.878954 1488539 round_trippers.go:580]     Audit-Id: 4f4a643c-22bf-404d-84ae-3ac4c3c0c7c2
	I0916 11:11:21.878957 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.878960 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.879323 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:22.376445 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:22.376467 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:22.376476 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.376480 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.379357 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.379387 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:22.379396 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.379417 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:22.379421 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:22.379425 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.379432 1488539 round_trippers.go:580]     Audit-Id: 0ba4f08e-ea6d-49c7-aebf-b0c80b21f7bc
	I0916 11:11:22.379435 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.379861 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:22.876494 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:22.876527 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:22.876537 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.876541 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.878793 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.878815 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:22.878823 1488539 round_trippers.go:580]     Audit-Id: 88cd8697-bb9b-4770-a6c9-600bfdf09e38
	I0916 11:11:22.878827 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.878831 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.878836 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:22.878840 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:22.878844 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.878960 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:23.376147 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:23.376171 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:23.376181 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.376184 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.379138 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:23.379162 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:23.379171 1488539 round_trippers.go:580]     Audit-Id: 988f79f3-6497-4095-b28d-beebe5879581
	I0916 11:11:23.379175 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.379178 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.379181 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:23.379184 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:23.379187 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.379810 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:23.380261 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:23.876514 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:23.876537 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:23.876546 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.876553 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.883023 1488539 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:11:23.883054 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:23.883064 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.883070 1488539 round_trippers.go:580]     Audit-Id: ea9ea0a9-be12-4e01-908a-505e4776f737
	I0916 11:11:23.883073 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.883079 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.883083 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:23.883086 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:23.883270 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:24.376469 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:24.376493 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:24.376503 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.376507 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.378869 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.378897 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:24.378906 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:24.378912 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.378916 1488539 round_trippers.go:580]     Audit-Id: 67e4f910-e530-48fc-977d-48b1d98ed762
	I0916 11:11:24.378918 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.378924 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.378927 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:24.379156 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:24.876824 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:24.876849 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:24.876859 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.876864 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.879124 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.879145 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:24.879153 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.879158 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.879162 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:24.879164 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:24.879167 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.879170 1488539 round_trippers.go:580]     Audit-Id: 5fdb15d8-7711-4ddc-912b-625186708776
	I0916 11:11:24.879336 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:25.376217 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:25.376243 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.376253 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.376259 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.380469 1488539 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:25.380494 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.380502 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.380507 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.380511 1488539 round_trippers.go:580]     Audit-Id: 269ea3da-803e-4aa0-a0a1-a4aa2801e1a8
	I0916 11:11:25.380513 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.380516 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.380520 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.380860 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:25.381367 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:25.876751 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:25.876773 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.876783 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.876788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.879208 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.879233 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.879242 1488539 round_trippers.go:580]     Audit-Id: 1d62600f-fcd0-4736-a2a0-8b49ba7d004a
	I0916 11:11:25.879247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.879252 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.879255 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.879259 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.879264 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.879606 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"496","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5937 chars]
	I0916 11:11:25.880035 1488539 node_ready.go:49] node "multinode-654612-m02" has status "Ready":"True"
	I0916 11:11:25.880057 1488539 node_ready.go:38] duration metric: took 11.504217601s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:11:25.880068 1488539 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:25.880138 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:25.880148 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.880157 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.880163 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.883900 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:25.883925 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.883940 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.883945 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.883948 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.883951 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.883954 1488539 round_trippers.go:580]     Audit-Id: 8e4d315b-e8a1-4079-8661-d4e87d78f00f
	I0916 11:11:25.883957 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.884651 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"496"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 74117 chars]
	I0916 11:11:25.888991 1488539 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.889146 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:11:25.889159 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.889170 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.889175 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.891622 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.891690 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.891707 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.891720 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.891723 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.891727 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.891729 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.891732 1488539 round_trippers.go:580]     Audit-Id: d406b4bb-4fc3-41de-a84d-7a350b8ef72b
	I0916 11:11:25.892041 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:11:25.892666 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.892721 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.892731 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.892746 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.894970 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.895003 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.895014 1488539 round_trippers.go:580]     Audit-Id: 340403b6-a361-4554-9399-58215f49305f
	I0916 11:11:25.895024 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.895030 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.895033 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.895036 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.895045 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.895201 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.895706 1488539 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.895729 1488539 pod_ready.go:82] duration metric: took 6.705485ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.895746 1488539 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.895841 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:11:25.895856 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.895867 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.895873 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.898278 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.898306 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.898315 1488539 round_trippers.go:580]     Audit-Id: a100b7ef-e39c-4b9c-a042-fd226c208c0a
	I0916 11:11:25.898330 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.898334 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.898337 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.898340 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.898343 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.898480 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"388","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6435 chars]
	I0916 11:11:25.898988 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.899002 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.899011 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.899017 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.901177 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.901196 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.901205 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.901210 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.901215 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.901217 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.901220 1488539 round_trippers.go:580]     Audit-Id: 2716640e-8a67-4ae7-8934-d85f52790903
	I0916 11:11:25.901223 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.901380 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.901788 1488539 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.901808 1488539 pod_ready.go:82] duration metric: took 6.05111ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.901826 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.901897 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:11:25.901907 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.901914 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.901919 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.903933 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.903960 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.903968 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.903972 1488539 round_trippers.go:580]     Audit-Id: 74b45ee3-ee8b-4ff6-b6ea-7262fd3720e4
	I0916 11:11:25.903975 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.903978 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.903987 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.903994 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.904143 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"386","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8513 chars]
	I0916 11:11:25.904818 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.904837 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.904846 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.904856 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.906893 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.906915 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.906924 1488539 round_trippers.go:580]     Audit-Id: 4e7e6cfa-d066-4c67-a3a4-d3c22480452a
	I0916 11:11:25.906928 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.906931 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.906949 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.906956 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.906959 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.907204 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.907590 1488539 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.907603 1488539 pod_ready.go:82] duration metric: took 5.767014ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.907613 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.907689 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:11:25.907695 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.907703 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.907707 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.909692 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.909710 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.909718 1488539 round_trippers.go:580]     Audit-Id: 84acaced-5ae1-460a-b56d-1e4c3d7f58bf
	I0916 11:11:25.909724 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.909728 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.909732 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.909735 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.909738 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.909880 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"372","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8088 chars]
	I0916 11:11:25.910394 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.910405 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.910413 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.910419 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.912415 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.912437 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.912444 1488539 round_trippers.go:580]     Audit-Id: edaf1560-20c9-42d1-a04d-9566b906e42a
	I0916 11:11:25.912450 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.912457 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.912460 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.912469 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.912472 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.912592 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.913087 1488539 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.913107 1488539 pod_ready.go:82] duration metric: took 5.485659ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.913119 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.077520 1488539 request.go:632] Waited for 164.320653ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:11:26.077615 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:11:26.077628 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.077641 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.077648 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.080335 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.080407 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.080430 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.080450 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.080484 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.080508 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.080527 1488539 round_trippers.go:580]     Audit-Id: e2e31149-de87-4b13-b79c-15dd39017298
	I0916 11:11:26.080563 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.080763 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"480","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:11:26.277518 1488539 request.go:632] Waited for 196.225146ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:26.277597 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:26.277607 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.277616 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.277620 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.279591 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:26.279617 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.279625 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.279629 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.279634 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.279674 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.279682 1488539 round_trippers.go:580]     Audit-Id: 6d4ec83f-1894-4ad0-a510-40c4043c2092
	I0916 11:11:26.279685 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.280083 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"496","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5937 chars]
	I0916 11:11:26.280532 1488539 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:26.280552 1488539 pod_ready.go:82] duration metric: took 367.419527ms for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.280563 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.477565 1488539 request.go:632] Waited for 196.907722ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:11:26.477633 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:11:26.477644 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.477653 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.477667 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.479978 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.480086 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.480126 1488539 round_trippers.go:580]     Audit-Id: 3286276e-0dd1-4467-a26f-7d35bed0fe93
	I0916 11:11:26.480153 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.480163 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.480168 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.480172 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.480176 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.480319 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"381","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:11:26.677094 1488539 request.go:632] Waited for 196.215867ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:26.677175 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:26.677183 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.677191 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.677199 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.679546 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.679577 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.679592 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.679597 1488539 round_trippers.go:580]     Audit-Id: 0d82fd4c-11eb-4cad-81bc-5ee2099948bf
	I0916 11:11:26.679600 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.679603 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.679606 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.679610 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.679732 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:26.680176 1488539 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:26.680196 1488539 pod_ready.go:82] duration metric: took 399.618315ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.680207 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.877625 1488539 request.go:632] Waited for 197.329316ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:11:26.877693 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:11:26.877702 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.877711 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.877725 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.880271 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.880293 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.880302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.880306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.880309 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.880313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.880316 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.880327 1488539 round_trippers.go:580]     Audit-Id: f048178d-d03b-4e9f-b1c8-d59ea03d7ecd
	I0916 11:11:26.880440 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"380","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4970 chars]
	I0916 11:11:27.077367 1488539 request.go:632] Waited for 196.389901ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:27.077432 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:27.077439 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:27.077449 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.077460 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.079936 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.079999 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:27.080008 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:27.080012 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:27.080024 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.080029 1488539 round_trippers.go:580]     Audit-Id: 62241b97-8695-473c-bb03-f7912c98aeae
	I0916 11:11:27.080032 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.080035 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.080134 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:27.080598 1488539 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:27.080617 1488539 pod_ready.go:82] duration metric: took 400.402526ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:27.080628 1488539 pod_ready.go:39] duration metric: took 1.200545275s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:27.080646 1488539 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:11:27.080725 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:27.093012 1488539 system_svc.go:56] duration metric: took 12.356868ms WaitForService to wait for kubelet
	I0916 11:11:27.093042 1488539 kubeadm.go:582] duration metric: took 12.840232566s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:11:27.093061 1488539 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:27.277447 1488539 request.go:632] Waited for 184.307636ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:27.277523 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:27.277542 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:27.277554 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.277561 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.280031 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.280147 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:27.280161 1488539 round_trippers.go:580]     Audit-Id: 0f62021e-0ed4-4157-a99d-5bac6c161952
	I0916 11:11:27.280166 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.280169 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.280174 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:27.280177 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:27.280180 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.280399 1488539 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"496"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields
":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 13011 chars]
	I0916 11:11:27.281202 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.281232 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.281244 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.281249 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.281254 1488539 node_conditions.go:105] duration metric: took 188.187762ms to run NodePressure ...
	I0916 11:11:27.281268 1488539 start.go:241] waiting for startup goroutines ...
	I0916 11:11:27.281297 1488539 start.go:255] writing updated cluster config ...
	I0916 11:11:27.281640 1488539 ssh_runner.go:195] Run: rm -f paused
	I0916 11:11:27.290662 1488539 out.go:177] * Done! kubectl is now configured to use "multinode-654612" cluster and "default" namespace by default
	E0916 11:11:27.293311 1488539 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
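	Note: the `fork/exec /usr/local/bin/kubectl: exec format error` above (the same error behind several failed tests listed at the top of this report) is the kernel's ENOEXEC, which on an arm64 host almost always means the kubectl binary was built for a different architecture (e.g. amd64) rather than being corrupt. A minimal Go sketch to confirm the binary's ELF machine type; the path comes from the log, everything else is illustrative:

	package main

	import (
		"debug/elf"
		"fmt"
		"log"
	)

	func main() {
		// Open the suspect binary; "exec format error" points at an
		// architecture mismatch rather than a missing file.
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			log.Fatal(err) // a non-ELF file would also fail here
		}
		defer f.Close()
		// On this arm64 host we expect EM_AARCH64; EM_X86_64 would
		// explain the ENOEXEC seen in the log.
		fmt.Println(f.Machine)
	}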
	
	
	==> CRI-O <==
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.707094730Z" level=info msg="Created container f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51: kube-system/coredns-7c65d6cfc9-szvv9/coredns" id=27a44a9a-eb3a-416e-b2cb-c51a2e28306c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.707959505Z" level=info msg="Starting container: f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51" id=1f2a4b0f-7b4a-4688-a5d2-7e39de4df255 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.717686792Z" level=info msg="Started container" PID=2034 containerID=f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51 description=kube-system/coredns-7c65d6cfc9-szvv9/coredns id=1f2a4b0f-7b4a-4688-a5d2-7e39de4df255 name=/runtime.v1.RuntimeService/StartContainer sandboxID=3c6d4cc55a80f4d37dca4b29803a144965d8762aa6f909ee7e5ece0519a03e2d
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.397099824Z" level=info msg="Running pod sandbox: default/busybox-7dff88458-rdtjw/POD" id=eb76a363-6add-42ae-a335-f67ab64212ac name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.397169516Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.421494831Z" level=info msg="Got pod network &{Name:busybox-7dff88458-rdtjw Namespace:default ID:4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 UID:39d79774-0a74-4464-a7fe-d312a92e8749 NetNS:/var/run/netns/ea67f675-e985-437a-ada1-08d17099372f Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.421535412Z" level=info msg="Adding pod default_busybox-7dff88458-rdtjw to CNI network \"kindnet\" (type=ptp)"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.430956721Z" level=info msg="Got pod network &{Name:busybox-7dff88458-rdtjw Namespace:default ID:4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 UID:39d79774-0a74-4464-a7fe-d312a92e8749 NetNS:/var/run/netns/ea67f675-e985-437a-ada1-08d17099372f Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.431107536Z" level=info msg="Checking pod default_busybox-7dff88458-rdtjw for CNI network kindnet (type=ptp)"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.434199899Z" level=info msg="Ran pod sandbox 4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 with infra container: default/busybox-7dff88458-rdtjw/POD" id=eb76a363-6add-42ae-a335-f67ab64212ac name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.435333541Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=e504711b-1b9c-4895-85b5-5984f4f6c966 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.435540141Z" level=info msg="Image gcr.io/k8s-minikube/busybox:1.28 not found" id=e504711b-1b9c-4895-85b5-5984f4f6c966 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.436980229Z" level=info msg="Pulling image: gcr.io/k8s-minikube/busybox:1.28" id=de9dde56-8541-4b4b-9aab-d772372f4595 name=/runtime.v1.ImageService/PullImage
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.438347835Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:11:29 multinode-654612 crio[972]: time="2024-09-16 11:11:29.711560207Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.747055733Z" level=info msg="Pulled image: gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3" id=de9dde56-8541-4b4b-9aab-d772372f4595 name=/runtime.v1.ImageService/PullImage
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.747857182Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=fdea80e0-8c7b-4064-9e0c-3aa24a9c23a8 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.748574391Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=fdea80e0-8c7b-4064-9e0c-3aa24a9c23a8 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.750907159Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=3e750e05-3736-48da-8bf3-a21449779615 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.752331550Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=3e750e05-3736-48da-8bf3-a21449779615 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.753437288Z" level=info msg="Creating container: default/busybox-7dff88458-rdtjw/busybox" id=6a5c5c9a-e338-4a50-8a11-a4dd364a425d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.753534844Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.807962628Z" level=info msg="Created container c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc: default/busybox-7dff88458-rdtjw/busybox" id=6a5c5c9a-e338-4a50-8a11-a4dd364a425d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.808503292Z" level=info msg="Starting container: c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc" id=8de8909e-6a59-4241-8695-89643b731ce1 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.817273531Z" level=info msg="Started container" PID=2145 containerID=c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc description=default/busybox-7dff88458-rdtjw/busybox id=8de8909e-6a59-4241-8695-89643b731ce1 name=/runtime.v1.RuntimeService/StartContainer sandboxID=4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414
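	Note: the CRI-O entries above trace the full CRI sequence the kubelet drives for the busybox test pod: RunPodSandbox, CNI attachment to the kindnet network, an ImageStatus check that misses ("Image ... not found"), PullImage, then CreateContainer and StartContainer. A minimal sketch of that status-then-pull step against the same crio socket, assuming k8s.io/cri-api and google.golang.org/grpc are available; this is illustrative, not minikube's own code:

	package main

	import (
		"context"
		"fmt"
		"log"
		"time"

		"google.golang.org/grpc"
		"google.golang.org/grpc/credentials/insecure"
		runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
	)

	func main() {
		// Dial the same socket the log shows: unix:///var/run/crio/crio.sock
		conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
			grpc.WithTransportCredentials(insecure.NewCredentials()))
		if err != nil {
			log.Fatal(err)
		}
		defer conn.Close()

		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
		defer cancel()

		img := &runtimeapi.ImageSpec{Image: "gcr.io/k8s-minikube/busybox:1.28"}
		client := runtimeapi.NewImageServiceClient(conn)

		// ImageStatus first; a nil Image mirrors the "not found" line above.
		status, err := client.ImageStatus(ctx, &runtimeapi.ImageStatusRequest{Image: img})
		if err != nil {
			log.Fatal(err)
		}
		if status.Image == nil {
			// Pull exactly as the kubelet did before creating the container.
			if _, err := client.PullImage(ctx, &runtimeapi.PullImageRequest{Image: img}); err != nil {
				log.Fatal(err)
			}
		}
		fmt.Println("image present")
	}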
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	c2520810d50a7       gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3   34 seconds ago       Running             busybox                   0                   4ef152fa69638       busybox-7dff88458-rdtjw
	f1f1cadfeb97f       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                      About a minute ago   Running             coredns                   0                   3c6d4cc55a80f       coredns-7c65d6cfc9-szvv9
	88a12fcbc6eb5       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                      About a minute ago   Running             storage-provisioner       0                   bcd5c2b8fce3c       storage-provisioner
	c2386662da70d       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                      About a minute ago   Running             kube-proxy                0                   e8daba284c881       kube-proxy-t9pzq
	9af0dfbb5d2f2       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                      About a minute ago   Running             kindnet-cni               0                   bbb8bb7e5ac7a       kindnet-whjqt
	5d34b90bf3c2a       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                      2 minutes ago        Running             kube-controller-manager   0                   d431cd82769b7       kube-controller-manager-multinode-654612
	7e4a553dd98fc       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                      2 minutes ago        Running             kube-scheduler            0                   76a6e90530729       kube-scheduler-multinode-654612
	00cc927e5dcd6       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                      2 minutes ago        Running             kube-apiserver            0                   5fff8c0ab15f3       kube-apiserver-multinode-654612
	c65aacc72d266       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                      2 minutes ago        Running             etcd                      0                   fa4d67791d874       etcd-multinode-654612
	
	
	==> coredns [f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51] <==
	[INFO] 10.244.0.3:35946 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00011523s
	[INFO] 10.244.2.2:51440 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000129236s
	[INFO] 10.244.2.2:58230 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001142438s
	[INFO] 10.244.2.2:37607 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000091895s
	[INFO] 10.244.2.2:59551 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000071325s
	[INFO] 10.244.2.2:54758 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001095875s
	[INFO] 10.244.2.2:45539 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000145334s
	[INFO] 10.244.2.2:60301 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000089524s
	[INFO] 10.244.2.2:50456 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000124509s
	[INFO] 10.244.0.3:39906 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000180779s
	[INFO] 10.244.0.3:56779 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000089704s
	[INFO] 10.244.0.3:52775 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000088252s
	[INFO] 10.244.0.3:35755 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000078915s
	[INFO] 10.244.2.2:41846 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000114139s
	[INFO] 10.244.2.2:51082 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.00011674s
	[INFO] 10.244.2.2:43718 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000079989s
	[INFO] 10.244.2.2:40056 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000092609s
	[INFO] 10.244.0.3:47718 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150273s
	[INFO] 10.244.0.3:56808 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000175955s
	[INFO] 10.244.0.3:52572 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000140033s
	[INFO] 10.244.0.3:48292 - 5 "PTR IN 1.67.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000144661s
	[INFO] 10.244.2.2:40796 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000146483s
	[INFO] 10.244.2.2:49060 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000089253s
	[INFO] 10.244.2.2:43910 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000064646s
	[INFO] 10.244.2.2:45764 - 5 "PTR IN 1.67.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000085364s
	
	
	==> describe nodes <==
	Name:               multinode-654612
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:10:07 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:02 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:56 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.2
	  Hostname:    multinode-654612
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 eae974bfbdcf45cc93be3557efd7b033
	  System UUID:                b0403d6b-24c6-42eb-8273-193a1e97b1c8
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-rdtjw                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         37s
	  kube-system                 coredns-7c65d6cfc9-szvv9                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     110s
	  kube-system                 etcd-multinode-654612                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         115s
	  kube-system                 kindnet-whjqt                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      111s
	  kube-system                 kube-apiserver-multinode-654612             250m (12%)    0 (0%)      0 (0%)           0 (0%)         115s
	  kube-system                 kube-controller-manager-multinode-654612    200m (10%)    0 (0%)      0 (0%)           0 (0%)         115s
	  kube-system                 kube-proxy-t9pzq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         111s
	  kube-system                 kube-scheduler-multinode-654612             100m (5%)     0 (0%)      0 (0%)           0 (0%)         115s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         110s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 109s  kube-proxy       
	  Normal   Starting                 115s  kubelet          Starting kubelet.
	  Warning  CgroupV1                 115s  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  115s  kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    115s  kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     115s  kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           111s  node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	  Normal   CIDRAssignmentFailed     111s  cidrAllocator    Node multinode-654612 status is now: CIDRAssignmentFailed
	  Normal   NodeReady                69s   kubelet          Node multinode-654612 status is now: NodeReady
	
	
	Name:               multinode-654612-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:13 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:04 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:25 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.3
	  Hostname:    multinode-654612-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 a4b1086ea37f43a28c08df81961f6812
	  System UUID:                9e565e5c-62ec-45b7-a6b4-8e158afd85b2
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-sfkxt    0 (0%)        0 (0%)      0 (0%)           0 (0%)         37s
	  kube-system                 kindnet-687xg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      52s
	  kube-system                 kube-proxy-gf2tw           0 (0%)        0 (0%)      0 (0%)           0 (0%)         52s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 51s                kube-proxy       
	  Warning  CgroupV1                 52s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  52s (x2 over 52s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    52s (x2 over 52s)  kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     52s (x2 over 52s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           51s                node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeReady                40s                kubelet          Node multinode-654612-m02 status is now: NodeReady
	
	
	Name:               multinode-654612-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_49_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:48 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:11:59 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:12:01 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.4
	  Hostname:    multinode-654612-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 8323217d2d004891992d40b8b53ad006
	  System UUID:                54271cc2-86f8-4253-83a9-e426ec1746ce
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-ncfhl       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      18s
	  kube-system                 kube-proxy-vf648    0 (0%)        0 (0%)      0 (0%)           0 (0%)         18s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 14s                kube-proxy       
	  Normal  NodeHasSufficientMemory  18s (x2 over 18s)  kubelet          Node multinode-654612-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    18s (x2 over 18s)  kubelet          Node multinode-654612-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     18s (x2 over 18s)  kubelet          Node multinode-654612-m03 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           17s                node-controller  Node multinode-654612-m03 event: Registered Node multinode-654612-m03 in Controller
	  Normal  NodeReady                5s                 kubelet          Node multinode-654612-m03 status is now: NodeReady
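	The node-ipam-controller handed out 10.244.0.0/24, 10.244.2.0/24 and 10.244.3.0/24 to the three nodes above; 10.244.1.0/24 appears to have been skipped, consistent with the CIDRAssignmentFailed event recorded on the control-plane node. With a working kubectl (the one on this runner fails with "exec format error"), the per-node allocations could be listed directly; a minimal sketch, assuming the context name used in this run:
	
	    $ kubectl --context multinode-654612 get nodes \
	        -o custom-columns=NAME:.metadata.name,PODCIDR:.spec.podCIDR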
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [c65aacc72d2663a923ab63ffa1649959e0e5fe51df61d0c8285d08becdf417a3] <==
	{"level":"info","ts":"2024-09-16T11:10:03.898258Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:10:03.898427Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:10:03.898514Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"8688e899f7831fc7","initial-advertise-peer-urls":["https://192.168.67.2:2380"],"listen-peer-urls":["https://192.168.67.2:2380"],"advertise-client-urls":["https://192.168.67.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.67.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:10:03.898539Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:10:04.631232Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631434Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631547Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgPreVoteResp from 8688e899f7831fc7 at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631589Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631642Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgVoteResp from 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631699Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631763Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 8688e899f7831fc7 elected leader 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.636890Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"8688e899f7831fc7","local-member-attributes":"{Name:multinode-654612 ClientURLs:[https://192.168.67.2:2379]}","request-path":"/0/members/8688e899f7831fc7/attributes","cluster-id":"9d8fdeb88b6def78","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:10:04.637094Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.638745Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:10:04.639216Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:10:04.640138Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:10:04.641356Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.67.2:2379"}
	{"level":"info","ts":"2024-09-16T11:10:04.642224Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:10:04.643322Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:10:04.644711Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:10:04.644780Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:10:04.644861Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"9d8fdeb88b6def78","local-member-id":"8688e899f7831fc7","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.644956Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.645009Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:11:39.551153Z","caller":"traceutil/trace.go:171","msg":"trace[43988492] transaction","detail":"{read_only:false; response_revision:542; number_of_response:1; }","duration":"100.767855ms","start":"2024-09-16T11:11:39.450367Z","end":"2024-09-16T11:11:39.551134Z","steps":["trace[43988492] 'process raft request'  (duration: 100.609417ms)"],"step_count":1}
	
	
	==> kernel <==
	 11:12:06 up 10:54,  0 users,  load average: 2.00, 2.78, 2.74
	Linux multinode-654612 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [9af0dfbb5d2f2d8bccd103497822725f259c42a87b482311032dd374224dc861] <==
	I0916 11:11:06.643384       1 main.go:299] handling current node
	I0916 11:11:16.635530       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:16.635563       1 main.go:299] handling current node
	I0916 11:11:16.635579       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:16.635585       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:16.635746       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.2.0/24 Src: <nil> Gw: 192.168.67.3 Flags: [] Table: 0} 
	I0916 11:11:26.635254       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:26.635301       1 main.go:299] handling current node
	I0916 11:11:26.635318       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:26.635326       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:36.636786       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:36.636892       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:36.637038       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:36.637096       1 main.go:299] handling current node
	I0916 11:11:46.634828       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:46.634877       1 main.go:299] handling current node
	I0916 11:11:46.634915       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:46.634925       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:56.639195       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:56.639231       1 main.go:299] handling current node
	I0916 11:11:56.639255       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:56.639261       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:56.639429       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:11:56.639446       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:11:56.639513       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.3.0/24 Src: <nil> Gw: 192.168.67.4 Flags: [] Table: 0} 
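	Each "Adding route" line above is kindnet programming a kernel route on the current node toward a peer node's pod CIDR via that peer's InternalIP. Since this cluster uses the docker driver (the node is the container multinode-654612), a spot-check could look like the sketch below, assuming the container is still running and iproute2 is present in the node image; the sample output is illustrative, not captured from this run:
	
	    $ docker exec multinode-654612 ip route | grep 10.244
	    # e.g. 10.244.2.0/24 via 192.168.67.3 dev eth0
	    #      10.244.3.0/24 via 192.168.67.4 dev eth0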
	
	
	==> kube-apiserver [00cc927e5dcd6bba6542407e57d794c19b8cdd3c3a3e876481e838d3d1bb32ca] <==
	I0916 11:10:07.969847       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:10:08.464735       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:10:08.470271       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:10:08.470301       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:10:09.126362       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:10:09.184879       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:10:09.269996       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:10:09.277769       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.67.2]
	I0916 11:10:09.278883       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:10:09.284228       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:10:09.675905       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:10:10.271952       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:10:10.287877       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:10:10.307423       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:10:14.930615       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0916 11:10:15.641848       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0916 11:11:33.130242       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48710: use of closed network connection
	E0916 11:11:33.355844       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48724: use of closed network connection
	E0916 11:11:33.570301       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48748: use of closed network connection
	E0916 11:11:33.990711       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48782: use of closed network connection
	E0916 11:11:34.198507       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48794: use of closed network connection
	E0916 11:11:34.549020       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48824: use of closed network connection
	E0916 11:11:34.768739       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48848: use of closed network connection
	E0916 11:11:34.972991       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48868: use of closed network connection
	E0916 11:11:35.194855       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48886: use of closed network connection
	
	
	==> kube-controller-manager [5d34b90bf3c2a4fe15bdb23da3582f1eda44e32048621175218fd569c86a37e4] <==
	I0916 11:11:28.080583       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="62.160108ms"
	I0916 11:11:28.113164       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="32.517221ms"
	I0916 11:11:28.149191       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="35.973293ms"
	I0916 11:11:28.149317       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="70.964µs"
	I0916 11:11:29.446716       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:11:31.413475       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="9.285853ms"
	I0916 11:11:31.414015       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="67.576µs"
	I0916 11:11:32.445448       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="6.381424ms"
	I0916 11:11:32.445537       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="43.068µs"
	I0916 11:11:42.095090       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612"
	I0916 11:11:44.122886       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:11:48.783812       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	I0916 11:11:48.783977       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-654612-m03\" does not exist"
	I0916 11:11:48.820854       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-654612-m03" podCIDRs=["10.244.3.0/24"]
	I0916 11:11:48.820891       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:48.820914       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:48.945125       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:49.263718       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:49.448971       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-654612-m03"
	I0916 11:11:49.514110       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:58.976456       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:01.782972       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	I0916 11:12:01.783032       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:01.798232       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:04.469038       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	
	
	==> kube-proxy [c2386662da70d7e3af63aeaec77233018972fa9dd01dd520da9d367adb678c73] <==
	I0916 11:10:16.501243       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:10:16.684816       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.67.2"]
	E0916 11:10:16.684888       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:10:16.703638       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:10:16.703703       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:10:16.705525       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:10:16.705846       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:10:16.705867       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:10:16.707680       1 config.go:199] "Starting service config controller"
	I0916 11:10:16.707780       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:10:16.708010       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:10:16.708046       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:10:16.709387       1 config.go:328] "Starting node config controller"
	I0916 11:10:16.709452       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:10:16.808466       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:10:16.808483       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:10:16.809681       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7e4a553dd98fcbff4d79c739829ae001409f265cf2c56dad37fb1e8c0a8ec53f] <==
	W0916 11:10:07.742823       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:07.742864       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743388       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:10:07.743480       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743615       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:07.743675       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743717       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 11:10:07.743754       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 11:10:07.743805       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:10:07.743848       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.745181       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:10:07.745214       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.604869       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 11:10:08.604927       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.705130       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 11:10:08.705179       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 11:10:08.713648       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 11:10:08.713691       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.735570       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:08.735734       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.874688       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:10:08.874727       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.904363       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:10:08.904411       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:10:10.824337       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: W0916 11:10:56.809977    1533 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: configmaps "coredns" is forbidden: User "system:node:multinode-654612" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'multinode-654612' and this object
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: E0916 11:10:56.810028    1533 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"coredns\" is forbidden: User \"system:node:multinode-654612\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'multinode-654612' and this object" logger="UnhandledError"
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: I0916 11:10:56.953119    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vqhk\" (UniqueName: \"kubernetes.io/projected/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-kube-api-access-9vqhk\") pod \"coredns-7c65d6cfc9-szvv9\" (UID: \"26df8cd4-36bc-49e1-98bf-9c30f5555b7b\") " pod="kube-system/coredns-7c65d6cfc9-szvv9"
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: I0916 11:10:56.953183    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/2b21455e-8cb4-4c70-937b-6ff3cd85b42f-tmp\") pod \"storage-provisioner\" (UID: \"2b21455e-8cb4-4c70-937b-6ff3cd85b42f\") " pod="kube-system/storage-provisioner"
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: I0916 11:10:56.953204    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kdt4\" (UniqueName: \"kubernetes.io/projected/2b21455e-8cb4-4c70-937b-6ff3cd85b42f-kube-api-access-6kdt4\") pod \"storage-provisioner\" (UID: \"2b21455e-8cb4-4c70-937b-6ff3cd85b42f\") " pod="kube-system/storage-provisioner"
	Sep 16 11:10:56 multinode-654612 kubelet[1533]: I0916 11:10:56.953227    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-config-volume\") pod \"coredns-7c65d6cfc9-szvv9\" (UID: \"26df8cd4-36bc-49e1-98bf-9c30f5555b7b\") " pod="kube-system/coredns-7c65d6cfc9-szvv9"
	Sep 16 11:10:58 multinode-654612 kubelet[1533]: E0916 11:10:58.054295    1533 configmap.go:193] Couldn't get configMap kube-system/coredns: failed to sync configmap cache: timed out waiting for the condition
	Sep 16 11:10:58 multinode-654612 kubelet[1533]: E0916 11:10:58.054424    1533 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-config-volume podName:26df8cd4-36bc-49e1-98bf-9c30f5555b7b nodeName:}" failed. No retries permitted until 2024-09-16 11:10:58.554400655 +0000 UTC m=+48.507549149 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-config-volume") pod "coredns-7c65d6cfc9-szvv9" (UID: "26df8cd4-36bc-49e1-98bf-9c30f5555b7b") : failed to sync configmap cache: timed out waiting for the condition
	Sep 16 11:10:59 multinode-654612 kubelet[1533]: I0916 11:10:59.374126    1533 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=44.374105486 podStartE2EDuration="44.374105486s" podCreationTimestamp="2024-09-16 11:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:10:57.369315089 +0000 UTC m=+47.322463599" watchObservedRunningTime="2024-09-16 11:10:59.374105486 +0000 UTC m=+49.327253980"
	Sep 16 11:10:59 multinode-654612 kubelet[1533]: I0916 11:10:59.394335    1533 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-szvv9" podStartSLOduration=44.394313568 podStartE2EDuration="44.394313568s" podCreationTimestamp="2024-09-16 11:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:10:59.374733475 +0000 UTC m=+49.327881969" watchObservedRunningTime="2024-09-16 11:10:59.394313568 +0000 UTC m=+49.347462070"
	Sep 16 11:11:00 multinode-654612 kubelet[1533]: E0916 11:11:00.285056    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485060284637473,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:00 multinode-654612 kubelet[1533]: E0916 11:11:00.285554    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485060284637473,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:10 multinode-654612 kubelet[1533]: E0916 11:11:10.286751    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485070286573355,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:10 multinode-654612 kubelet[1533]: E0916 11:11:10.286796    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485070286573355,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:20 multinode-654612 kubelet[1533]: E0916 11:11:20.287771    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485080287591079,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:20 multinode-654612 kubelet[1533]: E0916 11:11:20.287807    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485080287591079,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:28 multinode-654612 kubelet[1533]: I0916 11:11:28.253017    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqwgq\" (UniqueName: \"kubernetes.io/projected/39d79774-0a74-4464-a7fe-d312a92e8749-kube-api-access-zqwgq\") pod \"busybox-7dff88458-rdtjw\" (UID: \"39d79774-0a74-4464-a7fe-d312a92e8749\") " pod="default/busybox-7dff88458-rdtjw"
	Sep 16 11:11:30 multinode-654612 kubelet[1533]: E0916 11:11:30.289088    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485090288882050,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:30 multinode-654612 kubelet[1533]: E0916 11:11:30.289126    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485090288882050,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:40 multinode-654612 kubelet[1533]: E0916 11:11:40.290465    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485100290254974,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:40 multinode-654612 kubelet[1533]: E0916 11:11:40.290505    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485100290254974,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:50 multinode-654612 kubelet[1533]: E0916 11:11:50.292597    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485110292320307,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:50 multinode-654612 kubelet[1533]: E0916 11:11:50.292641    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485110292320307,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:00 multinode-654612 kubelet[1533]: E0916 11:12:00.307720    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485120304192586,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:00 multinode-654612 kubelet[1533]: E0916 11:12:00.308246    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485120304192586,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

                                                
                                                
-- /stdout --
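
For context on the kubelet noise above: the eviction manager keeps erroring because cri-o's ImageFsInfo response lists an image filesystem but an empty ContainerFilesystems slice, so the HasDedicatedImageFs decision cannot be made. A minimal sketch of that check, using simplified stand-in types that copy the field names from the log (not the real k8s.io/cri-api definitions):

package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins mirroring the ImageFsInfoResponse printed in the
// kubelet log above; the real types live in k8s.io/cri-api (runtime/v1).
type FilesystemUsage struct {
	Mountpoint string
	UsedBytes  uint64
}

type ImageFsInfoResponse struct {
	ImageFilesystems     []*FilesystemUsage
	ContainerFilesystems []*FilesystemUsage
}

// hasDedicatedImageFs sketches the failing check: with an empty
// ContainerFilesystems slice there is nothing to compare the image
// filesystem against, so the caller reports "missing image stats".
func hasDedicatedImageFs(resp *ImageFsInfoResponse) (bool, error) {
	if len(resp.ImageFilesystems) == 0 || len(resp.ContainerFilesystems) == 0 {
		return false, errors.New("missing image stats")
	}
	return resp.ImageFilesystems[0].Mountpoint != resp.ContainerFilesystems[0].Mountpoint, nil
}

func main() {
	// The response recorded above: one image filesystem, no container filesystems.
	resp := &ImageFsInfoResponse{
		ImageFilesystems: []*FilesystemUsage{{
			Mountpoint: "/var/lib/containers/storage/overlay-images",
			UsedBytes:  135010,
		}},
	}
	if _, err := hasDedicatedImageFs(resp); err != nil {
		fmt.Println("eviction manager reports:", err) // missing image stats
	}
}

These errors recur every ~10s but are not what fails the test; the failure below is the kubectl "exec format error".
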
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-654612 -n multinode-654612
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (773.405µs)
helpers_test.go:263: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/MultiNodeLabels (2.97s)
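
Every kubectl invocation in this report fails the same way, which points at the binary rather than the cluster: "exec format error" from fork/exec typically means /usr/local/bin/kubectl was built for a different CPU architecture than this arm64 host (presumably an amd64 build). A quick check is to read the ELF header with Go's standard debug/elf package:

package main

import (
	"debug/elf"
	"fmt"
	"log"
)

func main() {
	// Path taken from the failing test output above.
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	// On this arm64 host we expect EM_AARCH64; EM_X86_64 would explain
	// the "exec format error" seen throughout this report.
	fmt.Println("ELF machine:", f.Machine)
}
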

                                                
                                    
x
+
TestMultiNode/serial/StartAfterStop (13.1s)

                                                
                                                
=== RUN   TestMultiNode/serial/StartAfterStop
multinode_test.go:282: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 node start m03 -v=7 --alsologtostderr
multinode_test.go:282: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 node start m03 -v=7 --alsologtostderr: (9.301850365s)
multinode_test.go:290: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status -v=7 --alsologtostderr
multinode_test.go:306: (dbg) Run:  kubectl get nodes
multinode_test.go:306: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (375.983µs)
multinode_test.go:308: failed to kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/StartAfterStop]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-654612
helpers_test.go:235: (dbg) docker inspect multinode-654612:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd",
	        "Created": "2024-09-16T11:09:45.282229543Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1489022,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:09:45.436723255Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hostname",
	        "HostsPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hosts",
	        "LogPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd-json.log",
	        "Name": "/multinode-654612",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-654612:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-654612",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/merged",
	                "UpperDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/diff",
	                "WorkDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-654612",
	                "Source": "/var/lib/docker/volumes/multinode-654612/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-654612",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-654612",
	                "name.minikube.sigs.k8s.io": "multinode-654612",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "e9bdb8a96f941d3fb83485ff41db347cd0ca4b8ec9abe479b77dc1f947540d87",
	            "SandboxKey": "/var/run/docker/netns/e9bdb8a96f94",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34738"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34739"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34742"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34740"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34741"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-654612": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.67.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:43:02",
	                    "DriverOpts": null,
	                    "NetworkID": "76703dbf7b5c303b888ff80e924d3dab5e1ece3140da60ee94903d5d35e68013",
	                    "EndpointID": "55b225e9580b23d8f9c99debd0c6695a2fd84eb21d47b2088c34127e4a33c4a8",
	                    "Gateway": "192.168.67.1",
	                    "IPAddress": "192.168.67.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-654612",
	                        "402497514f0b"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
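
As the inspect output shows, every exposed container port is published on 127.0.0.1 with a randomly assigned host port (22/tcp maps to 34738, 8443/tcp to 34741, and so on, matching the --publish=127.0.0.1:: flags used at container creation later in these logs). A standalone sketch of the port lookup minikube performs below, reusing the same Go template that appears in the log:

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	// Same template minikube uses below to resolve the published SSH port.
	format := `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
	out, err := exec.Command("docker", "container", "inspect", "-f", format, "multinode-654612").Output()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("published SSH port:", strings.TrimSpace(string(out))) // 34738 in this run
}
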
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-654612 -n multinode-654612
helpers_test.go:244: <<< TestMultiNode/serial/StartAfterStop FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/StartAfterStop]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 logs -n 25: (1.642503102s)
helpers_test.go:252: TestMultiNode/serial/StartAfterStop logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-654612 cp multinode-654612:/home/docker/cp-test.txt                           | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test_multinode-654612_multinode-654612-m03.txt     |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612 sudo cat                                                               |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m03 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612_multinode-654612-m03.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp testdata/cp-test.txt                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m02.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m02_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m03 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp testdata/cp-test.txt                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m03_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02:/home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m02 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-654612 node stop m03                                                          | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	| node    | multinode-654612 node start                                                             | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:09:39
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:09:39.863368 1488539 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:09:39.863515 1488539 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:39.863527 1488539 out.go:358] Setting ErrFile to fd 2...
	I0916 11:09:39.863532 1488539 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:39.863795 1488539 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:09:39.864221 1488539 out.go:352] Setting JSON to false
	I0916 11:09:39.865166 1488539 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":39125,"bootTime":1726445855,"procs":181,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:09:39.865239 1488539 start.go:139] virtualization:  
	I0916 11:09:39.868551 1488539 out.go:177] * [multinode-654612] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:09:39.872083 1488539 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:09:39.872162 1488539 notify.go:220] Checking for updates...
	I0916 11:09:39.877390 1488539 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:09:39.880030 1488539 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:09:39.882656 1488539 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:09:39.885306 1488539 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:09:39.887961 1488539 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:09:39.890832 1488539 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:09:39.916693 1488539 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:09:39.916927 1488539 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:39.974976 1488539 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:09:39.964987506 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:39.975089 1488539 docker.go:318] overlay module found
	I0916 11:09:39.977984 1488539 out.go:177] * Using the docker driver based on user configuration
	I0916 11:09:39.980581 1488539 start.go:297] selected driver: docker
	I0916 11:09:39.980603 1488539 start.go:901] validating driver "docker" against <nil>
	I0916 11:09:39.980625 1488539 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:09:39.981381 1488539 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:40.041171 1488539 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:09:40.030394576 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:40.041409 1488539 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:09:40.041688 1488539 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:09:40.044619 1488539 out.go:177] * Using Docker driver with root privileges
	I0916 11:09:40.047373 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:09:40.047450 1488539 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 11:09:40.047463 1488539 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:09:40.047559 1488539 start.go:340] cluster config:
	{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:40.050654 1488539 out.go:177] * Starting "multinode-654612" primary control-plane node in "multinode-654612" cluster
	I0916 11:09:40.053501 1488539 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:09:40.056432 1488539 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:09:40.059144 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:40.059238 1488539 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:09:40.059253 1488539 cache.go:56] Caching tarball of preloaded images
	I0916 11:09:40.059264 1488539 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:09:40.059356 1488539 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:09:40.059369 1488539 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:09:40.059794 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:09:40.059873 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json: {Name:mk35ee7a773defc02a83448392f8abace23e005e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 11:09:40.079455 1488539 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:09:40.079483 1488539 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:09:40.079575 1488539 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:09:40.079629 1488539 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:09:40.079639 1488539 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:09:40.079647 1488539 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:09:40.079656 1488539 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:09:40.081374 1488539 image.go:273] response: 
	I0916 11:09:40.211671 1488539 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:09:40.211740 1488539 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:09:40.211785 1488539 start.go:360] acquireMachinesLock for multinode-654612: {Name:mkfbf36af9c510d3c0697cdadc867dcd6648c047 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:09:40.211918 1488539 start.go:364] duration metric: took 108.116µs to acquireMachinesLock for "multinode-654612"
	I0916 11:09:40.211952 1488539 start.go:93] Provisioning new machine with config: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:09:40.212028 1488539 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:09:40.215209 1488539 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:09:40.215498 1488539 start.go:159] libmachine.API.Create for "multinode-654612" (driver="docker")
	I0916 11:09:40.215539 1488539 client.go:168] LocalClient.Create starting
	I0916 11:09:40.215658 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:09:40.215697 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:09:40.215713 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:09:40.215764 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:09:40.215782 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:09:40.215793 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:09:40.216194 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:09:40.232508 1488539 cli_runner.go:211] docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:09:40.232598 1488539 network_create.go:284] running [docker network inspect multinode-654612] to gather additional debugging logs...
	I0916 11:09:40.232618 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612
	W0916 11:09:40.247001 1488539 cli_runner.go:211] docker network inspect multinode-654612 returned with exit code 1
	I0916 11:09:40.247035 1488539 network_create.go:287] error running [docker network inspect multinode-654612]: docker network inspect multinode-654612: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network multinode-654612 not found
	I0916 11:09:40.247049 1488539 network_create.go:289] output of [docker network inspect multinode-654612]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network multinode-654612 not found
	
	** /stderr **
	I0916 11:09:40.247160 1488539 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:09:40.263391 1488539 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-a49e1846148d IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:d3:9d:ef:74} reservation:<nil>}
	I0916 11:09:40.263722 1488539 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-2e9863632116 IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:77:c8:06:b6} reservation:<nil>}
	I0916 11:09:40.264086 1488539 network.go:206] using free private subnet 192.168.67.0/24: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x40017ccb30}
	I0916 11:09:40.264113 1488539 network_create.go:124] attempt to create docker network multinode-654612 192.168.67.0/24 with gateway 192.168.67.1 and MTU of 1500 ...
	I0916 11:09:40.264184 1488539 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.67.0/24 --gateway=192.168.67.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=multinode-654612 multinode-654612
	I0916 11:09:40.339078 1488539 network_create.go:108] docker network multinode-654612 192.168.67.0/24 created
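
The three network.go lines above show the subnet picker at work: walk candidate private /24s, skip any that already back an existing bridge, and take the first free one. A minimal sketch of that scan, with the taken set hard-coded from this run for illustration:

package main

import "fmt"

func main() {
	// Subnets already claimed by existing docker bridges in this run
	// (br-a49e1846148d and br-2e9863632116 in the log above).
	taken := map[string]bool{
		"192.168.49.0/24": true,
		"192.168.58.0/24": true,
	}
	// Candidates stepped through in the order the log suggests
	// (third octet 49 -> 58 -> 67 -> ...); hard-coded here as a sketch.
	for _, cidr := range []string{"192.168.49.0/24", "192.168.58.0/24", "192.168.67.0/24", "192.168.76.0/24"} {
		if taken[cidr] {
			fmt.Println("skipping subnet", cidr, "that is taken")
			continue
		}
		fmt.Println("using free private subnet", cidr) // 192.168.67.0/24, as chosen above
		break
	}
}
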
	I0916 11:09:40.339126 1488539 kic.go:121] calculated static IP "192.168.67.2" for the "multinode-654612" container
	I0916 11:09:40.339207 1488539 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:09:40.354826 1488539 cli_runner.go:164] Run: docker volume create multinode-654612 --label name.minikube.sigs.k8s.io=multinode-654612 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:09:40.371838 1488539 oci.go:103] Successfully created a docker volume multinode-654612
	I0916 11:09:40.371968 1488539 cli_runner.go:164] Run: docker run --rm --name multinode-654612-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612 --entrypoint /usr/bin/test -v multinode-654612:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:09:40.940233 1488539 oci.go:107] Successfully prepared a docker volume multinode-654612
	I0916 11:09:40.940293 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:40.940314 1488539 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:09:40.940388 1488539 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:09:45.179143 1488539 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.238708379s)
	I0916 11:09:45.179188 1488539 kic.go:203] duration metric: took 4.238869672s to extract preloaded images to volume ...
	W0916 11:09:45.179381 1488539 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:09:45.179529 1488539 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:09:45.263136 1488539 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-654612 --name multinode-654612 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-654612 --network multinode-654612 --ip 192.168.67.2 --volume multinode-654612:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:09:45.638223 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Running}}
	I0916 11:09:45.656026 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:45.675985 1488539 cli_runner.go:164] Run: docker exec multinode-654612 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:09:45.744966 1488539 oci.go:144] the created container "multinode-654612" has a running status.
	I0916 11:09:45.744998 1488539 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa...
	I0916 11:09:46.839280 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:09:46.839333 1488539 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:09:46.857527 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:46.873242 1488539 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:09:46.873265 1488539 kic_runner.go:114] Args: [docker exec --privileged multinode-654612 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:09:46.923651 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:09:46.939897 1488539 machine.go:93] provisionDockerMachine start ...
	I0916 11:09:46.939998 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:46.955785 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:46.956066 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:46.956081 1488539 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:09:47.092400 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:09:47.092427 1488539 ubuntu.go:169] provisioning hostname "multinode-654612"
	I0916 11:09:47.092539 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.111222 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.111472 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.111490 1488539 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612 && echo "multinode-654612" | sudo tee /etc/hostname
	I0916 11:09:47.261836 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:09:47.261918 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.280380 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.280631 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.280651 1488539 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:09:47.417316 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:09:47.417345 1488539 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:09:47.417414 1488539 ubuntu.go:177] setting up certificates
	I0916 11:09:47.417426 1488539 provision.go:84] configureAuth start
	I0916 11:09:47.417508 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:47.434212 1488539 provision.go:143] copyHostCerts
	I0916 11:09:47.434260 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:09:47.434299 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:09:47.434309 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:09:47.434394 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:09:47.434483 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:09:47.434506 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:09:47.434514 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:09:47.434541 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:09:47.434583 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:09:47.434610 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:09:47.434618 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:09:47.434642 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:09:47.434694 1488539 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612 san=[127.0.0.1 192.168.67.2 localhost minikube multinode-654612]
	I0916 11:09:47.759510 1488539 provision.go:177] copyRemoteCerts
	I0916 11:09:47.759580 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:09:47.759640 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.776880 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:47.873818 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:09:47.873883 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:09:47.899736 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:09:47.899808 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:09:47.924490 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:09:47.924556 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:09:47.948605 1488539 provision.go:87] duration metric: took 531.159961ms to configureAuth
	I0916 11:09:47.948637 1488539 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:09:47.948948 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:09:47.949067 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:47.966342 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.966644 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34738 <nil> <nil>}
	I0916 11:09:47.966673 1488539 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:09:48.213760 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
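The tee above drops a one-line environment file that the kicbase image's crio.service is expected to source (the exact EnvironmentFile wiring is an assumption of this note), passing --insecure-registry for the 10.96.0.0/12 service CIDR; the systemctl restart makes CRI-O pick it up. A quick sanity check inside the node:

    $ cat /etc/sysconfig/crio.minikube
    CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
    $ systemctl is-active crio
    active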
	I0916 11:09:48.213788 1488539 machine.go:96] duration metric: took 1.273865466s to provisionDockerMachine
	I0916 11:09:48.213798 1488539 client.go:171] duration metric: took 7.99825059s to LocalClient.Create
	I0916 11:09:48.213832 1488539 start.go:167] duration metric: took 7.998335929s to libmachine.API.Create "multinode-654612"
	I0916 11:09:48.213847 1488539 start.go:293] postStartSetup for "multinode-654612" (driver="docker")
	I0916 11:09:48.213865 1488539 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:09:48.213948 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:09:48.214017 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.231965 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.329975 1488539 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:09:48.333135 1488539 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:09:48.333154 1488539 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:09:48.333164 1488539 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:09:48.333170 1488539 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:09:48.333175 1488539 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:09:48.333178 1488539 command_runner.go:130] > ID=ubuntu
	I0916 11:09:48.333181 1488539 command_runner.go:130] > ID_LIKE=debian
	I0916 11:09:48.333188 1488539 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:09:48.333192 1488539 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:09:48.333199 1488539 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:09:48.333206 1488539 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:09:48.333211 1488539 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:09:48.333251 1488539 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:09:48.333274 1488539 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:09:48.333284 1488539 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:09:48.333292 1488539 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:09:48.333303 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:09:48.333364 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:09:48.333453 1488539 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:09:48.333460 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:09:48.333570 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:09:48.342289 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:09:48.366208 1488539 start.go:296] duration metric: took 152.341296ms for postStartSetup
	I0916 11:09:48.366571 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:48.382958 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:09:48.383247 1488539 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:09:48.383290 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.399589 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.497545 1488539 command_runner.go:130] > 12%
	I0916 11:09:48.498084 1488539 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:09:48.503022 1488539 command_runner.go:130] > 172G
	I0916 11:09:48.503051 1488539 start.go:128] duration metric: took 8.291011625s to createHost
	I0916 11:09:48.503062 1488539 start.go:83] releasing machines lock for "multinode-654612", held for 8.291131384s
	I0916 11:09:48.503141 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:09:48.519372 1488539 ssh_runner.go:195] Run: cat /version.json
	I0916 11:09:48.519433 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.519689 1488539 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:09:48.519759 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:09:48.539532 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.556842 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:09:48.757240 1488539 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:09:48.757311 1488539 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:09:48.757476 1488539 ssh_runner.go:195] Run: systemctl --version
	I0916 11:09:48.761376 1488539 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:09:48.761419 1488539 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:09:48.761712 1488539 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:09:48.903904 1488539 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:09:48.908107 1488539 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:09:48.908135 1488539 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:09:48.908142 1488539 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 11:09:48.908149 1488539 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:48.908155 1488539 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:09:48.908174 1488539 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:09:48.908183 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:09:48.908188 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:09:48.908668 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:09:48.929931 1488539 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:09:48.930058 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:09:48.963124 1488539 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:09:48.963191 1488539 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
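Rather than deleting the conflicting CNI configs, the two find/mv passes above sideline them with a .mk_disabled suffix so they can be restored later; only the CNI minikube installs should stay active. Going by the three files named in this log, a listing inside the node would be expected to show:

    $ ls -1 /etc/cni/net.d/
    100-crio-bridge.conf.mk_disabled
    200-loopback.conf.mk_disabled
    87-podman-bridge.conflist.mk_disabled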
	I0916 11:09:48.963203 1488539 start.go:495] detecting cgroup driver to use...
	I0916 11:09:48.963237 1488539 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:09:48.963304 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:09:48.979439 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:09:48.991317 1488539 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:09:48.991426 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:09:49.007780 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:09:49.025143 1488539 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:09:49.115879 1488539 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:09:49.212140 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:09:49.212222 1488539 docker.go:233] disabling docker service ...
	I0916 11:09:49.212302 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:09:49.234143 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:09:49.246639 1488539 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:09:49.258136 1488539 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:09:49.341778 1488539 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:09:49.353681 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:09:49.439730 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:09:49.451566 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:09:49.467301 1488539 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
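The /etc/crictl.yaml written above is what lets the later bare crictl invocations in this log (crictl version, crictl images) find CRI-O without flags; it is equivalent to passing the endpoint explicitly:

    $ sudo crictl --runtime-endpoint unix:///var/run/crio/crio.sock version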
	I0916 11:09:49.468467 1488539 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:09:49.468529 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.479433 1488539 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:09:49.479520 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.490641 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.501363 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.512267 1488539 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:09:49.522639 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.533656 1488539 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:09:49.550687 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
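Taken together, the sed edits above rewrite four settings in /etc/crio/crio.conf.d/02-crio.conf: the pause image, the cgroup manager, the conmon cgroup, and the unprivileged-port sysctl. A plausible grep over the resulting drop-in (line order and surrounding context are assumptions, but the values follow from the sed patterns):

    $ sudo grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' /etc/crio/crio.conf.d/02-crio.conf
    pause_image = "registry.k8s.io/pause:3.10"
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
      "net.ipv4.ip_unprivileged_port_start=0",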
	I0916 11:09:49.560865 1488539 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:09:49.569553 1488539 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:09:49.570749 1488539 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:09:49.579548 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:49.668236 1488539 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:09:49.766038 1488539 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:09:49.766111 1488539 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:09:49.769662 1488539 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:09:49.769684 1488539 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:09:49.769692 1488539 command_runner.go:130] > Device: 43h/67d	Inode: 186         Links: 1
	I0916 11:09:49.769700 1488539 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:49.769705 1488539 command_runner.go:130] > Access: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769710 1488539 command_runner.go:130] > Modify: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769715 1488539 command_runner.go:130] > Change: 2024-09-16 11:09:49.751207935 +0000
	I0916 11:09:49.769718 1488539 command_runner.go:130] >  Birth: -
	I0916 11:09:49.769734 1488539 start.go:563] Will wait 60s for crictl version
	I0916 11:09:49.769793 1488539 ssh_runner.go:195] Run: which crictl
	I0916 11:09:49.772913 1488539 command_runner.go:130] > /usr/bin/crictl
	I0916 11:09:49.773248 1488539 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:09:49.810657 1488539 command_runner.go:130] > Version:  0.1.0
	I0916 11:09:49.810680 1488539 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:09:49.810689 1488539 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:09:49.810851 1488539 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:09:49.813451 1488539 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:09:49.813535 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:09:49.850914 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:09:49.850939 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:09:49.850948 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:09:49.850952 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:09:49.850959 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:09:49.850963 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:09:49.850967 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:09:49.850971 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:09:49.850976 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:09:49.850984 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:09:49.850991 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:09:49.850995 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:09:49.853424 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:09:49.893332 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:09:49.893356 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:09:49.893364 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:09:49.893369 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:09:49.893375 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:09:49.893380 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:09:49.893384 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:09:49.893388 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:09:49.893399 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:09:49.893412 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:09:49.893420 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:09:49.893425 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:09:49.898428 1488539 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:09:49.901050 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:09:49.915805 1488539 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:09:49.919461 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:09:49.930712 1488539 kubeadm.go:883] updating cluster {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:09:49.930838 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:09:49.930906 1488539 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:09:50.014687 1488539 command_runner.go:130] > {
	I0916 11:09:50.014715 1488539 command_runner.go:130] >   "images": [
	I0916 11:09:50.014720 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014730 1488539 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:09:50.014736 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014742 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:09:50.014753 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014758 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014767 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:09:50.014775 1488539 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:09:50.014779 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014784 1488539 command_runner.go:130] >       "size": "90295858",
	I0916 11:09:50.014787 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014791 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.014796 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014804 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014807 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014811 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014817 1488539 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:09:50.014823 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014829 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:09:50.014832 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014837 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014846 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:09:50.014859 1488539 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:09:50.014862 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014868 1488539 command_runner.go:130] >       "size": "29037500",
	I0916 11:09:50.014875 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014879 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.014883 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014887 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014890 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014893 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014903 1488539 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:09:50.014909 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.014914 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:09:50.014917 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014921 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.014932 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:09:50.014945 1488539 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:09:50.014949 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.014959 1488539 command_runner.go:130] >       "size": "61647114",
	I0916 11:09:50.014963 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.014967 1488539 command_runner.go:130] >       "username": "nonroot",
	I0916 11:09:50.014971 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.014975 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.014978 1488539 command_runner.go:130] >     },
	I0916 11:09:50.014981 1488539 command_runner.go:130] >     {
	I0916 11:09:50.014987 1488539 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:09:50.014995 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015003 1488539 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:09:50.015008 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015013 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015024 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:09:50.015037 1488539 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:09:50.015044 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015049 1488539 command_runner.go:130] >       "size": "139912446",
	I0916 11:09:50.015052 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015056 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015065 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015069 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015073 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015077 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015084 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015087 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015094 1488539 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:09:50.015098 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015107 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:09:50.015111 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015120 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015128 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:09:50.015148 1488539 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:09:50.015155 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015159 1488539 command_runner.go:130] >       "size": "92632544",
	I0916 11:09:50.015162 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015166 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015169 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015173 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015177 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015181 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015188 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015191 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015203 1488539 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:09:50.015206 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015212 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:09:50.015216 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015220 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015229 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:09:50.015240 1488539 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:09:50.015244 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015248 1488539 command_runner.go:130] >       "size": "86930758",
	I0916 11:09:50.015252 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015259 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015262 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015266 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015270 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015274 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015277 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015281 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015288 1488539 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:09:50.015296 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015301 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:09:50.015305 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015309 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015321 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:09:50.015330 1488539 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:09:50.015337 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015341 1488539 command_runner.go:130] >       "size": "95951255",
	I0916 11:09:50.015345 1488539 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.015349 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015353 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015356 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015360 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015363 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015370 1488539 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:09:50.015373 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015378 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:09:50.015381 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015385 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015410 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:09:50.015422 1488539 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:09:50.015425 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015429 1488539 command_runner.go:130] >       "size": "67007814",
	I0916 11:09:50.015432 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015436 1488539 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.015439 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015443 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015446 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015450 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015453 1488539 command_runner.go:130] >     },
	I0916 11:09:50.015456 1488539 command_runner.go:130] >     {
	I0916 11:09:50.015464 1488539 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:09:50.015471 1488539 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.015475 1488539 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:09:50.015479 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015483 1488539 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.015491 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:09:50.015502 1488539 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:09:50.015511 1488539 command_runner.go:130] >       ],
	I0916 11:09:50.015518 1488539 command_runner.go:130] >       "size": "519877",
	I0916 11:09:50.015522 1488539 command_runner.go:130] >       "uid": {
	I0916 11:09:50.015526 1488539 command_runner.go:130] >         "value": "65535"
	I0916 11:09:50.015529 1488539 command_runner.go:130] >       },
	I0916 11:09:50.015533 1488539 command_runner.go:130] >       "username": "",
	I0916 11:09:50.015540 1488539 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.015544 1488539 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.015547 1488539 command_runner.go:130] >     }
	I0916 11:09:50.015550 1488539 command_runner.go:130] >   ]
	I0916 11:09:50.015553 1488539 command_runner.go:130] > }
	I0916 11:09:50.017697 1488539 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:09:50.017728 1488539 crio.go:433] Images already preloaded, skipping extraction
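The preload check at crio.go:514 boils down to comparing the repoTags in the JSON above against the image list expected for Kubernetes v1.31.1 on CRI-O. With jq available (an assumption; it is not necessarily installed in the kicbase image), the same tag list can be extracted by hand:

    $ sudo crictl images --output json | jq -r '.images[].repoTags[]'
    docker.io/kindest/kindnetd:v20240813-c6f155d6
    gcr.io/k8s-minikube/storage-provisioner:v5
    registry.k8s.io/coredns/coredns:v1.11.3
    registry.k8s.io/etcd:3.5.15-0
    registry.k8s.io/kube-apiserver:v1.31.1
    registry.k8s.io/kube-controller-manager:v1.31.1
    registry.k8s.io/kube-proxy:v1.31.1
    registry.k8s.io/kube-scheduler:v1.31.1
    registry.k8s.io/pause:3.10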
	I0916 11:09:50.017803 1488539 ssh_runner.go:195] Run: sudo crictl images --output json
	[output of this second "sudo crictl images --output json" run trimmed: it lists the same nine images, with identical IDs, repoTags, repoDigests, and sizes, as the listing above]
	I0916 11:09:50.061494 1488539 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:09:50.061521 1488539 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:09:50.061531 1488539 kubeadm.go:934] updating node { 192.168.67.2 8443 v1.31.1 crio true true} ...
	I0916 11:09:50.061679 1488539 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
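The unit snippet above is the systemd drop-in minikube generates for the kubelet: the empty ExecStart= line clears the packaged default before the fully specified command line replaces it, pinning the bundled v1.31.1 binary, the node IP, and the multinode-654612 hostname override. Once written, the effective unit (base file plus drop-ins) can be inspected inside the node with:

    $ sudo systemctl cat kubelet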
	I0916 11:09:50.061817 1488539 ssh_runner.go:195] Run: crio config
	I0916 11:09:50.109237 1488539 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 11:09:50.109264 1488539 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 11:09:50.109271 1488539 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 11:09:50.109275 1488539 command_runner.go:130] > #
	I0916 11:09:50.109283 1488539 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 11:09:50.109290 1488539 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 11:09:50.109296 1488539 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 11:09:50.109316 1488539 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 11:09:50.109320 1488539 command_runner.go:130] > # reload'.
	I0916 11:09:50.109327 1488539 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 11:09:50.109333 1488539 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 11:09:50.109340 1488539 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 11:09:50.109346 1488539 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 11:09:50.109349 1488539 command_runner.go:130] > [crio]
	I0916 11:09:50.109360 1488539 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 11:09:50.109365 1488539 command_runner.go:130] > # containers images, in this directory.
	I0916 11:09:50.109375 1488539 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 11:09:50.109382 1488539 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 11:09:50.109387 1488539 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 11:09:50.109393 1488539 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 11:09:50.109400 1488539 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 11:09:50.109404 1488539 command_runner.go:130] > # storage_driver = "vfs"
	I0916 11:09:50.109409 1488539 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 11:09:50.109415 1488539 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 11:09:50.109418 1488539 command_runner.go:130] > # storage_option = [
	I0916 11:09:50.109421 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.109427 1488539 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 11:09:50.109434 1488539 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 11:09:50.109438 1488539 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 11:09:50.109444 1488539 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 11:09:50.109450 1488539 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 11:09:50.109454 1488539 command_runner.go:130] > # always happen on a node reboot
	I0916 11:09:50.109458 1488539 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 11:09:50.109464 1488539 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 11:09:50.109470 1488539 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 11:09:50.109479 1488539 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 11:09:50.109484 1488539 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 11:09:50.109494 1488539 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 11:09:50.109502 1488539 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 11:09:50.109505 1488539 command_runner.go:130] > # internal_wipe = true
	I0916 11:09:50.109511 1488539 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 11:09:50.109517 1488539 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 11:09:50.109522 1488539 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 11:09:50.109527 1488539 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 11:09:50.109533 1488539 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 11:09:50.109537 1488539 command_runner.go:130] > [crio.api]
	I0916 11:09:50.109542 1488539 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 11:09:50.109550 1488539 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 11:09:50.109555 1488539 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 11:09:50.109559 1488539 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 11:09:50.109566 1488539 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 11:09:50.109571 1488539 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 11:09:50.109600 1488539 command_runner.go:130] > # stream_port = "0"
	I0916 11:09:50.109609 1488539 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 11:09:50.109700 1488539 command_runner.go:130] > # stream_enable_tls = false
	I0916 11:09:50.109710 1488539 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 11:09:50.109961 1488539 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 11:09:50.109973 1488539 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 11:09:50.109981 1488539 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 11:09:50.109990 1488539 command_runner.go:130] > # minutes.
	I0916 11:09:50.110223 1488539 command_runner.go:130] > # stream_tls_cert = ""
	I0916 11:09:50.110235 1488539 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 11:09:50.110247 1488539 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 11:09:50.110520 1488539 command_runner.go:130] > # stream_tls_key = ""
	I0916 11:09:50.110531 1488539 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 11:09:50.110538 1488539 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 11:09:50.110549 1488539 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 11:09:50.110793 1488539 command_runner.go:130] > # stream_tls_ca = ""
	I0916 11:09:50.110838 1488539 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:09:50.111125 1488539 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 11:09:50.111137 1488539 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:09:50.111518 1488539 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
	I0916 11:09:50.111553 1488539 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 11:09:50.111560 1488539 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 11:09:50.111565 1488539 command_runner.go:130] > [crio.runtime]
	I0916 11:09:50.111571 1488539 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 11:09:50.111577 1488539 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 11:09:50.111581 1488539 command_runner.go:130] > # "nofile=1024:2048"
	I0916 11:09:50.111587 1488539 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 11:09:50.111710 1488539 command_runner.go:130] > # default_ulimits = [
	I0916 11:09:50.111854 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.111870 1488539 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 11:09:50.112197 1488539 command_runner.go:130] > # no_pivot = false
	I0916 11:09:50.112219 1488539 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 11:09:50.112228 1488539 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 11:09:50.112524 1488539 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 11:09:50.112535 1488539 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 11:09:50.112540 1488539 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 11:09:50.112548 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:09:50.112840 1488539 command_runner.go:130] > # conmon = ""
	I0916 11:09:50.112850 1488539 command_runner.go:130] > # Cgroup setting for conmon
	I0916 11:09:50.112865 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 11:09:50.113037 1488539 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 11:09:50.113048 1488539 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 11:09:50.113054 1488539 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 11:09:50.113066 1488539 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:09:50.113216 1488539 command_runner.go:130] > # conmon_env = [
	I0916 11:09:50.113416 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.113480 1488539 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 11:09:50.113567 1488539 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 11:09:50.113594 1488539 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 11:09:50.113613 1488539 command_runner.go:130] > # default_env = [
	I0916 11:09:50.113753 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.113809 1488539 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 11:09:50.114106 1488539 command_runner.go:130] > # selinux = false
	I0916 11:09:50.114174 1488539 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 11:09:50.114251 1488539 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 11:09:50.114279 1488539 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 11:09:50.114406 1488539 command_runner.go:130] > # seccomp_profile = ""
	I0916 11:09:50.114457 1488539 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 11:09:50.114481 1488539 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 11:09:50.114501 1488539 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 11:09:50.114576 1488539 command_runner.go:130] > # which might increase security.
	I0916 11:09:50.114750 1488539 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 11:09:50.114806 1488539 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 11:09:50.114829 1488539 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 11:09:50.114891 1488539 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 11:09:50.114932 1488539 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 11:09:50.114969 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.115048 1488539 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 11:09:50.115101 1488539 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 11:09:50.115122 1488539 command_runner.go:130] > # the cgroup blockio controller.
	I0916 11:09:50.115351 1488539 command_runner.go:130] > # blockio_config_file = ""
	I0916 11:09:50.115403 1488539 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 11:09:50.115485 1488539 command_runner.go:130] > # irqbalance daemon.
	I0916 11:09:50.115668 1488539 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 11:09:50.115711 1488539 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 11:09:50.115779 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.115994 1488539 command_runner.go:130] > # rdt_config_file = ""
	I0916 11:09:50.116061 1488539 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 11:09:50.116167 1488539 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 11:09:50.116228 1488539 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 11:09:50.116394 1488539 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 11:09:50.116455 1488539 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 11:09:50.116535 1488539 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 11:09:50.116574 1488539 command_runner.go:130] > # will be added.
	I0916 11:09:50.116593 1488539 command_runner.go:130] > # default_capabilities = [
	I0916 11:09:50.116841 1488539 command_runner.go:130] > # 	"CHOWN",
	I0916 11:09:50.117031 1488539 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 11:09:50.117206 1488539 command_runner.go:130] > # 	"FSETID",
	I0916 11:09:50.117389 1488539 command_runner.go:130] > # 	"FOWNER",
	I0916 11:09:50.117571 1488539 command_runner.go:130] > # 	"SETGID",
	I0916 11:09:50.117760 1488539 command_runner.go:130] > # 	"SETUID",
	I0916 11:09:50.117994 1488539 command_runner.go:130] > # 	"SETPCAP",
	I0916 11:09:50.118143 1488539 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 11:09:50.118302 1488539 command_runner.go:130] > # 	"KILL",
	I0916 11:09:50.118466 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.118551 1488539 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 11:09:50.118643 1488539 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 11:09:50.118836 1488539 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 11:09:50.118894 1488539 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 11:09:50.118926 1488539 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:09:50.119032 1488539 command_runner.go:130] > default_sysctls = [
	I0916 11:09:50.119085 1488539 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 11:09:50.119158 1488539 command_runner.go:130] > ]
	I0916 11:09:50.119253 1488539 command_runner.go:130] > # List of devices on the host that a
	I0916 11:09:50.119376 1488539 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 11:09:50.119467 1488539 command_runner.go:130] > # allowed_devices = [
	I0916 11:09:50.119510 1488539 command_runner.go:130] > # 	"/dev/fuse",
	I0916 11:09:50.119708 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.119758 1488539 command_runner.go:130] > # List of additional devices, specified as
	I0916 11:09:50.119877 1488539 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 11:09:50.119934 1488539 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 11:09:50.119992 1488539 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:09:50.120066 1488539 command_runner.go:130] > # additional_devices = [
	I0916 11:09:50.120115 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.120152 1488539 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 11:09:50.120219 1488539 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 11:09:50.120406 1488539 command_runner.go:130] > # 	"/etc/cdi",
	I0916 11:09:50.120561 1488539 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 11:09:50.120728 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.120818 1488539 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 11:09:50.120898 1488539 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking host's uid/gid.
	I0916 11:09:50.120960 1488539 command_runner.go:130] > # Defaults to false.
	I0916 11:09:50.121087 1488539 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 11:09:50.121121 1488539 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 11:09:50.121209 1488539 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 11:09:50.121281 1488539 command_runner.go:130] > # hooks_dir = [
	I0916 11:09:50.121503 1488539 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 11:09:50.121679 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.121779 1488539 command_runner.go:130] > # Path to the file specifying the default mounts for each container. The
	I0916 11:09:50.121845 1488539 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 11:09:50.121886 1488539 command_runner.go:130] > # its default mounts from the following two files:
	I0916 11:09:50.121928 1488539 command_runner.go:130] > #
	I0916 11:09:50.121948 1488539 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 11:09:50.122021 1488539 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 11:09:50.122073 1488539 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 11:09:50.122091 1488539 command_runner.go:130] > #
	I0916 11:09:50.122127 1488539 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 11:09:50.122199 1488539 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 11:09:50.122254 1488539 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 11:09:50.122284 1488539 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 11:09:50.122299 1488539 command_runner.go:130] > #
	I0916 11:09:50.122377 1488539 command_runner.go:130] > # default_mounts_file = ""
	I0916 11:09:50.122455 1488539 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 11:09:50.122520 1488539 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 11:09:50.122585 1488539 command_runner.go:130] > # pids_limit = 0
	I0916 11:09:50.122617 1488539 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 11:09:50.122694 1488539 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 11:09:50.122740 1488539 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 11:09:50.122792 1488539 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 11:09:50.122866 1488539 command_runner.go:130] > # log_size_max = -1
	I0916 11:09:50.122890 1488539 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 11:09:50.122951 1488539 command_runner.go:130] > # log_to_journald = false
	I0916 11:09:50.122971 1488539 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 11:09:50.123272 1488539 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 11:09:50.123312 1488539 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 11:09:50.123612 1488539 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 11:09:50.123651 1488539 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 11:09:50.123790 1488539 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 11:09:50.123836 1488539 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 11:09:50.124117 1488539 command_runner.go:130] > # read_only = false
	I0916 11:09:50.124171 1488539 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 11:09:50.124251 1488539 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 11:09:50.124297 1488539 command_runner.go:130] > # live configuration reload.
	I0916 11:09:50.124438 1488539 command_runner.go:130] > # log_level = "info"
	I0916 11:09:50.124471 1488539 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 11:09:50.124532 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.124721 1488539 command_runner.go:130] > # log_filter = ""
	I0916 11:09:50.124788 1488539 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 11:09:50.124809 1488539 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 11:09:50.124880 1488539 command_runner.go:130] > # separated by comma.
	I0916 11:09:50.124931 1488539 command_runner.go:130] > # uid_mappings = ""
	I0916 11:09:50.124962 1488539 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 11:09:50.124998 1488539 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 11:09:50.125017 1488539 command_runner.go:130] > # separated by comma.
	I0916 11:09:50.125201 1488539 command_runner.go:130] > # gid_mappings = ""
	I0916 11:09:50.125269 1488539 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 11:09:50.125307 1488539 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:09:50.125392 1488539 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:09:50.125587 1488539 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 11:09:50.125600 1488539 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 11:09:50.125607 1488539 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:09:50.125615 1488539 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:09:50.125940 1488539 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 11:09:50.125952 1488539 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 11:09:50.125958 1488539 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 11:09:50.125977 1488539 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 11:09:50.126266 1488539 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 11:09:50.126277 1488539 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 11:09:50.126314 1488539 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 11:09:50.126322 1488539 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 11:09:50.126327 1488539 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 11:09:50.126661 1488539 command_runner.go:130] > # drop_infra_ctr = true
	I0916 11:09:50.126673 1488539 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 11:09:50.126680 1488539 command_runner.go:130] > # You can use linux CPU list format to specify desired CPUs.
	I0916 11:09:50.126687 1488539 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 11:09:50.126937 1488539 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 11:09:50.126947 1488539 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 11:09:50.126970 1488539 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 11:09:50.127279 1488539 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 11:09:50.127290 1488539 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 11:09:50.127545 1488539 command_runner.go:130] > # pinns_path = ""
	I0916 11:09:50.127556 1488539 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 11:09:50.127589 1488539 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 11:09:50.127597 1488539 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 11:09:50.128040 1488539 command_runner.go:130] > # default_runtime = "runc"
	I0916 11:09:50.128057 1488539 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 11:09:50.128066 1488539 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior of being created as a directory).
	I0916 11:09:50.128088 1488539 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 11:09:50.128118 1488539 command_runner.go:130] > # creation as a file is not desired either.
	I0916 11:09:50.128129 1488539 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 11:09:50.128133 1488539 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 11:09:50.128138 1488539 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 11:09:50.128141 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.128147 1488539 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 11:09:50.128154 1488539 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 11:09:50.128161 1488539 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 11:09:50.128167 1488539 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 11:09:50.128170 1488539 command_runner.go:130] > #
	I0916 11:09:50.128175 1488539 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 11:09:50.128194 1488539 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 11:09:50.128200 1488539 command_runner.go:130] > #  runtime_type = "oci"
	I0916 11:09:50.128205 1488539 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 11:09:50.128210 1488539 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 11:09:50.128214 1488539 command_runner.go:130] > #  allowed_annotations = []
	I0916 11:09:50.128217 1488539 command_runner.go:130] > # Where:
	I0916 11:09:50.128223 1488539 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 11:09:50.128230 1488539 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 11:09:50.128244 1488539 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 11:09:50.128250 1488539 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 11:09:50.128254 1488539 command_runner.go:130] > #   in $PATH.
	I0916 11:09:50.128273 1488539 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 11:09:50.128279 1488539 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 11:09:50.128286 1488539 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 11:09:50.128298 1488539 command_runner.go:130] > #   state.
	I0916 11:09:50.128305 1488539 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 11:09:50.128311 1488539 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 11:09:50.128317 1488539 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 11:09:50.128330 1488539 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 11:09:50.128350 1488539 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 11:09:50.128365 1488539 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 11:09:50.128370 1488539 command_runner.go:130] > #   The currently recognized values are:
	I0916 11:09:50.128380 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 11:09:50.128403 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 11:09:50.128411 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 11:09:50.128417 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 11:09:50.128425 1488539 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 11:09:50.128431 1488539 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 11:09:50.128437 1488539 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 11:09:50.128445 1488539 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 11:09:50.128450 1488539 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 11:09:50.128454 1488539 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 11:09:50.128458 1488539 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 11:09:50.128462 1488539 command_runner.go:130] > runtime_type = "oci"
	I0916 11:09:50.128479 1488539 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 11:09:50.128484 1488539 command_runner.go:130] > runtime_config_path = ""
	I0916 11:09:50.128525 1488539 command_runner.go:130] > monitor_path = ""
	I0916 11:09:50.128529 1488539 command_runner.go:130] > monitor_cgroup = ""
	I0916 11:09:50.128536 1488539 command_runner.go:130] > monitor_exec_cgroup = ""
	I0916 11:09:50.128584 1488539 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 11:09:50.128590 1488539 command_runner.go:130] > # running containers
	I0916 11:09:50.128595 1488539 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 11:09:50.128602 1488539 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 11:09:50.128609 1488539 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 11:09:50.128614 1488539 command_runner.go:130] > # surface and mitigating the consequences of a container breakout.
	I0916 11:09:50.128633 1488539 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 11:09:50.128639 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 11:09:50.128644 1488539 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 11:09:50.128648 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 11:09:50.128652 1488539 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 11:09:50.128656 1488539 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 11:09:50.128666 1488539 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 11:09:50.128692 1488539 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 11:09:50.128700 1488539 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 11:09:50.128708 1488539 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and a set of resources it supports mutating.
	I0916 11:09:50.128716 1488539 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 11:09:50.128722 1488539 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 11:09:50.128731 1488539 command_runner.go:130] > # For a container to opt into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 11:09:50.128742 1488539 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 11:09:50.128748 1488539 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 11:09:50.128755 1488539 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 11:09:50.128758 1488539 command_runner.go:130] > # Example:
	I0916 11:09:50.128763 1488539 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 11:09:50.128770 1488539 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 11:09:50.128774 1488539 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 11:09:50.128794 1488539 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 11:09:50.128798 1488539 command_runner.go:130] > # cpuset = "0-1"
	I0916 11:09:50.128802 1488539 command_runner.go:130] > # cpushares = 0
	I0916 11:09:50.128805 1488539 command_runner.go:130] > # Where:
	I0916 11:09:50.128809 1488539 command_runner.go:130] > # The workload name is workload-type.
	I0916 11:09:50.128816 1488539 command_runner.go:130] > # To opt in, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 11:09:50.128821 1488539 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 11:09:50.128827 1488539 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 11:09:50.128835 1488539 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 11:09:50.128849 1488539 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 11:09:50.128852 1488539 command_runner.go:130] > # 
	I0916 11:09:50.128871 1488539 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 11:09:50.128875 1488539 command_runner.go:130] > #
	I0916 11:09:50.128882 1488539 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 11:09:50.128888 1488539 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 11:09:50.128894 1488539 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 11:09:50.128901 1488539 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 11:09:50.128907 1488539 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 11:09:50.128910 1488539 command_runner.go:130] > [crio.image]
	I0916 11:09:50.128916 1488539 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 11:09:50.128923 1488539 command_runner.go:130] > # default_transport = "docker://"
	I0916 11:09:50.128929 1488539 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 11:09:50.128947 1488539 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:09:50.128953 1488539 command_runner.go:130] > # global_auth_file = ""
	I0916 11:09:50.128958 1488539 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 11:09:50.128975 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.128981 1488539 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 11:09:50.128987 1488539 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 11:09:50.128994 1488539 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:09:50.128999 1488539 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:09:50.129003 1488539 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 11:09:50.129008 1488539 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 11:09:50.129014 1488539 command_runner.go:130] > # When explicitly set to "", it will fallback to the entrypoint and command
	I0916 11:09:50.129020 1488539 command_runner.go:130] > # specified in the pause image. When commented out, it will fallback to the
	I0916 11:09:50.129026 1488539 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 11:09:50.129075 1488539 command_runner.go:130] > # pause_command = "/pause"
	I0916 11:09:50.129083 1488539 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 11:09:50.129090 1488539 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 11:09:50.129096 1488539 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 11:09:50.129102 1488539 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 11:09:50.129107 1488539 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 11:09:50.129110 1488539 command_runner.go:130] > # signature_policy = ""
	I0916 11:09:50.129118 1488539 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 11:09:50.129138 1488539 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 11:09:50.129143 1488539 command_runner.go:130] > # changing them here.
	I0916 11:09:50.129147 1488539 command_runner.go:130] > # insecure_registries = [
	I0916 11:09:50.129150 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.129156 1488539 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 11:09:50.129161 1488539 command_runner.go:130] > # ignore; the latter will ignore volumes entirely.
	I0916 11:09:50.129165 1488539 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 11:09:50.129170 1488539 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 11:09:50.129174 1488539 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 11:09:50.129180 1488539 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 11:09:50.129183 1488539 command_runner.go:130] > # CNI plugins.
	I0916 11:09:50.129186 1488539 command_runner.go:130] > [crio.network]
	I0916 11:09:50.129192 1488539 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 11:09:50.129211 1488539 command_runner.go:130] > # CRI-O will pick up the first one found in network_dir.
	I0916 11:09:50.129216 1488539 command_runner.go:130] > # cni_default_network = ""
	I0916 11:09:50.129225 1488539 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 11:09:50.129229 1488539 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 11:09:50.129235 1488539 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 11:09:50.129240 1488539 command_runner.go:130] > # plugin_dirs = [
	I0916 11:09:50.129243 1488539 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 11:09:50.129270 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.129289 1488539 command_runner.go:130] > # A necessary configuration for Prometheus-based metrics retrieval
	I0916 11:09:50.129293 1488539 command_runner.go:130] > [crio.metrics]
	I0916 11:09:50.129298 1488539 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 11:09:50.129303 1488539 command_runner.go:130] > # enable_metrics = false
	I0916 11:09:50.129308 1488539 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 11:09:50.129312 1488539 command_runner.go:130] > # Per default all metrics are enabled.
	I0916 11:09:50.129318 1488539 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 11:09:50.129324 1488539 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 11:09:50.129330 1488539 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 11:09:50.129334 1488539 command_runner.go:130] > # metrics_collectors = [
	I0916 11:09:50.129337 1488539 command_runner.go:130] > # 	"operations",
	I0916 11:09:50.129343 1488539 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 11:09:50.129347 1488539 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 11:09:50.129400 1488539 command_runner.go:130] > # 	"operations_errors",
	I0916 11:09:50.129583 1488539 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 11:09:50.129773 1488539 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 11:09:50.129802 1488539 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 11:09:50.129808 1488539 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 11:09:50.129813 1488539 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 11:09:50.129816 1488539 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 11:09:50.129820 1488539 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 11:09:50.129824 1488539 command_runner.go:130] > # 	"containers_oom_total",
	I0916 11:09:50.129828 1488539 command_runner.go:130] > # 	"containers_oom",
	I0916 11:09:50.129832 1488539 command_runner.go:130] > # 	"processes_defunct",
	I0916 11:09:50.129878 1488539 command_runner.go:130] > # 	"operations_total",
	I0916 11:09:50.130001 1488539 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 11:09:50.130170 1488539 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 11:09:50.130178 1488539 command_runner.go:130] > # 	"operations_errors_total",
	I0916 11:09:50.130184 1488539 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 11:09:50.130204 1488539 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 11:09:50.130223 1488539 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 11:09:50.130228 1488539 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 11:09:50.130232 1488539 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 11:09:50.130236 1488539 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 11:09:50.130239 1488539 command_runner.go:130] > # ]
	I0916 11:09:50.130244 1488539 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 11:09:50.130314 1488539 command_runner.go:130] > # metrics_port = 9090
	I0916 11:09:50.130321 1488539 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 11:09:50.130553 1488539 command_runner.go:130] > # metrics_socket = ""
	I0916 11:09:50.130570 1488539 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 11:09:50.130585 1488539 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 11:09:50.130593 1488539 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 11:09:50.130598 1488539 command_runner.go:130] > # certificate on any modification event.
	I0916 11:09:50.130601 1488539 command_runner.go:130] > # metrics_cert = ""
	I0916 11:09:50.130607 1488539 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 11:09:50.130612 1488539 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 11:09:50.130632 1488539 command_runner.go:130] > # metrics_key = ""
	I0916 11:09:50.130640 1488539 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 11:09:50.130643 1488539 command_runner.go:130] > [crio.tracing]
	I0916 11:09:50.130649 1488539 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 11:09:50.130652 1488539 command_runner.go:130] > # enable_tracing = false
	I0916 11:09:50.130658 1488539 command_runner.go:130] > # Address on which the gRPC trace collector listens.
	I0916 11:09:50.130662 1488539 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 11:09:50.130666 1488539 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 11:09:50.130707 1488539 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 11:09:50.130715 1488539 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 11:09:50.130718 1488539 command_runner.go:130] > [crio.stats]
	I0916 11:09:50.130724 1488539 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 11:09:50.130729 1488539 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 11:09:50.130875 1488539 command_runner.go:130] > # stats_collection_period = 0
	I0916 11:09:50.133009 1488539 command_runner.go:130] ! time="2024-09-16 11:09:50.106299699Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 11:09:50.133031 1488539 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
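	The configuration dump above is what CRI-O on this node will actually run with; the notable departures from stock defaults are conmon_cgroup = "pod", cgroup_manager = "cgroupfs", the net.ipv4.ip_unprivileged_port_start=0 default sysctl, and pause_image = "registry.k8s.io/pause:3.10". A minimal sketch for spot-checking those fields on the live node follows (not part of the test run; the profile name is taken from this log, and it assumes CRI-O's "crio config" subcommand, which prints the effective configuration):
	
	# Print CRI-O's effective config inside the node and pick out the
	# fields minikube overrides (sketch under the assumptions above).
	minikube ssh -p multinode-654612 -- \
	  "sudo crio config 2>/dev/null | grep -E 'cgroup_manager|conmon_cgroup|pause_image|ip_unprivileged_port_start'"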
	I0916 11:09:50.133134 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:09:50.133142 1488539 cni.go:136] multinode detected (1 node found), recommending kindnet
	I0916 11:09:50.133151 1488539 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:09:50.133179 1488539 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.67.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-654612 NodeName:multinode-654612 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.67.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.67.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:09:50.133319 1488539 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.67.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "multinode-654612"
	  kubeletExtraArgs:
	    node-ip: 192.168.67.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.67.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
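	
	The rendered kubeadm config above (InitConfiguration, ClusterConfiguration, KubeletConfiguration and KubeProxyConfiguration in one multi-document YAML) is written to /var/tmp/minikube/kubeadm.yaml.new a few lines below. A hedged sketch for checking such a file before handing it to kubeadm, assuming the "kubeadm config validate" subcommand shipped with v1.31 and the binary path the log finds next:
	
	# Validate the generated config with the node's own kubeadm binary
	# (sketch; profile name and paths taken from the surrounding log lines).
	minikube ssh -p multinode-654612 -- \
	  "sudo /var/lib/minikube/binaries/v1.31.1/kubeadm config validate --config /var/tmp/minikube/kubeadm.yaml.new"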
	
	I0916 11:09:50.133399 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:09:50.141869 1488539 command_runner.go:130] > kubeadm
	I0916 11:09:50.141894 1488539 command_runner.go:130] > kubectl
	I0916 11:09:50.141898 1488539 command_runner.go:130] > kubelet
	I0916 11:09:50.143073 1488539 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:09:50.143177 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:09:50.152626 1488539 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (366 bytes)
	I0916 11:09:50.172497 1488539 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:09:50.192328 1488539 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2154 bytes)
	I0916 11:09:50.210914 1488539 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:09:50.214537 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
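	The bash one-liner above is the usual idempotent hosts-file update: filter out any stale control-plane.minikube.internal entry, append the fresh mapping, and copy the temp file over /etc/hosts with "sudo cp" (inside a container /etc/hosts is typically bind-mounted, so a rename-based mv would fail where an in-place copy succeeds). A generic sketch of the same pattern, with a hypothetical helper name:
	
	# pin_host: idempotently map NAME to ADDR in /etc/hosts
	# (hypothetical helper; same filter-append-copy pattern as above).
	pin_host() {
	  local addr="$1" name="$2"
	  { grep -v "	${name}\$" /etc/hosts; printf '%s\t%s\n' "$addr" "$name"; } > "/tmp/hosts.$$"
	  sudo cp "/tmp/hosts.$$" /etc/hosts && rm -f "/tmp/hosts.$$"
	}
	pin_host 192.168.67.2 control-plane.minikube.internal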
	I0916 11:09:50.225794 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:50.313520 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:09:50.327991 1488539 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.2
	I0916 11:09:50.328015 1488539 certs.go:194] generating shared ca certs ...
	I0916 11:09:50.328041 1488539 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:50.328216 1488539 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:09:50.328272 1488539 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:09:50.328291 1488539 certs.go:256] generating profile certs ...
	I0916 11:09:50.328365 1488539 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key
	I0916 11:09:50.328382 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt with IP's: []
	I0916 11:09:51.208048 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt ...
	I0916 11:09:51.208131 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt: {Name:mk1816cbc9363ecfe161b609b475f722dc15370b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.208371 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key ...
	I0916 11:09:51.208408 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key: {Name:mk78c749df7c64f579f85fe55fa244d192f30537 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.208556 1488539 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e
	I0916 11:09:51.208599 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.67.2]
	I0916 11:09:51.585190 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e ...
	I0916 11:09:51.585227 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e: {Name:mk85af17d14460b81f65bc98a438799a21dcc7e2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.585489 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e ...
	I0916 11:09:51.585507 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e: {Name:mkd172abd2af8e3371642f78522e3ee51cf4c879 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.585602 1488539 certs.go:381] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt.51b1752e -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt
	I0916 11:09:51.585697 1488539 certs.go:385] copying /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e -> /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key
	I0916 11:09:51.585760 1488539 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key
	I0916 11:09:51.585779 1488539 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt with IP's: []
	I0916 11:09:52.392062 1488539 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt ...
	I0916 11:09:52.392097 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt: {Name:mk3f8a946d3a53bec11f22f47ded66010decb891 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:52.392285 1488539 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key ...
	I0916 11:09:52.392312 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key: {Name:mk2e493f7cbfb27e5386bd8f92eff0a2de000bf2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:52.392404 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:09:52.392426 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:09:52.392440 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:09:52.392459 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:09:52.392474 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:09:52.392493 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:09:52.392513 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:09:52.392527 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:09:52.392585 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:09:52.392632 1488539 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:09:52.392640 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:09:52.392667 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:09:52.392718 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:09:52.392746 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:09:52.392795 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:09:52.392832 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.392850 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.392862 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.393473 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:09:52.420759 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:09:52.447472 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:09:52.472263 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:09:52.496610 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:09:52.521557 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:09:52.546624 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:09:52.572181 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:09:52.597083 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:09:52.623828 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:09:52.648742 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:09:52.678092 1488539 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:09:52.695974 1488539 ssh_runner.go:195] Run: openssl version
	I0916 11:09:52.701144 1488539 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:09:52.701598 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:09:52.711032 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714481 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714826 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.714897 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:52.721306 1488539 command_runner.go:130] > b5213941
	I0916 11:09:52.721806 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:09:52.731165 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:09:52.741068 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744721 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744762 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.744815 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:09:52.751341 1488539 command_runner.go:130] > 51391683
	I0916 11:09:52.751749 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:09:52.761375 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:09:52.770588 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774092 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774123 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.774189 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:09:52.781373 1488539 command_runner.go:130] > 3ec20f2e
	I0916 11:09:52.781816 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
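The ln -fs runs above implement OpenSSL's hashed-directory convention for CA lookup: each trusted certificate is symlinked as <subject-hash>.0 under /etc/ssl/certs, where the hash comes from `openssl x509 -hash -noout`. A rough Go equivalent of one iteration, shelling out the same way the log shows (paths copied from the log; this is a sketch, not minikube's ssh_runner code):

// Sketch: compute a CA's subject hash and create the hash-named symlink,
// mirroring "test -L <hash>.0 || ln -fs <cert> <hash>.0" above.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	cert := "/usr/share/ca-certificates/minikubeCA.pem"
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", cert).Output()
	if err != nil {
		panic(err)
	}
	hash := strings.TrimSpace(string(out)) // e.g. "b5213941" for minikubeCA.pem above
	link := fmt.Sprintf("/etc/ssl/certs/%s.0", hash)
	// Only create the link if it does not already exist, as in the log.
	cmd := fmt.Sprintf("test -L %s || ln -fs /etc/ssl/certs/minikubeCA.pem %s", link, link)
	if err := exec.Command("sudo", "/bin/bash", "-c", cmd).Run(); err != nil {
		panic(err)
	}
}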
	I0916 11:09:52.791027 1488539 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:09:52.794279 1488539 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:09:52.794318 1488539 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:09:52.794356 1488539 kubeadm.go:392] StartCluster: {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:52.794436 1488539 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:09:52.794492 1488539 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:09:52.834750 1488539 cri.go:89] found id: ""
	I0916 11:09:52.834863 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:09:52.843934 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0916 11:09:52.843972 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0916 11:09:52.843982 1488539 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0916 11:09:52.844097 1488539 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:09:52.853358 1488539 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:09:52.853460 1488539 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:09:52.862655 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0916 11:09:52.862680 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0916 11:09:52.862688 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0916 11:09:52.862910 1488539 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:09:52.863920 1488539 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:09:52.863941 1488539 kubeadm.go:157] found existing configuration files:
	
	I0916 11:09:52.863994 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 11:09:52.872703 1488539 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:09:52.872756 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:09:52.872837 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:09:52.881329 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 11:09:52.890064 1488539 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:09:52.890105 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:09:52.890159 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:09:52.898828 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 11:09:52.907846 1488539 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:09:52.907891 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:09:52.907967 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:09:52.916769 1488539 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 11:09:52.925409 1488539 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:09:52.925451 1488539 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:09:52.925513 1488539 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
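The grep/rm sequence above is minikube's stale-config cleanup: for each kubeconfig under /etc/kubernetes, it checks whether the file references the expected control-plane endpoint and removes it if not, so the following `kubeadm init` writes a fresh one. A local Go sketch of the same check (the real checks run remotely over SSH with sudo):

// Sketch: remove kubeconfigs that are missing or do not reference the
// expected control-plane endpoint, mirroring the grep/rm loop above.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	endpoint := "https://control-plane.minikube.internal:8443"
	for _, f := range []string{"admin.conf", "kubelet.conf", "controller-manager.conf", "scheduler.conf"} {
		path := "/etc/kubernetes/" + f
		data, err := os.ReadFile(path)
		if err != nil || !strings.Contains(string(data), endpoint) {
			// Missing or stale: delete, ignoring errors like `sudo rm -f` does.
			os.Remove(path)
			fmt.Println("removed (or absent):", path)
		}
	}
}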
	I0916 11:09:52.934105 1488539 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:09:52.980348 1488539 kubeadm.go:310] W0916 11:09:52.979690    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.980422 1488539 command_runner.go:130] ! W0916 11:09:52.979690    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.981213 1488539 kubeadm.go:310] W0916 11:09:52.980671    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:52.981271 1488539 command_runner.go:130] ! W0916 11:09:52.980671    1216 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:09:53.001534 1488539 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:09:53.001617 1488539 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:09:53.065728 1488539 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:09:53.065736 1488539 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:10:10.854424 1488539 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:10:10.854451 1488539 command_runner.go:130] > [init] Using Kubernetes version: v1.31.1
	I0916 11:10:10.854499 1488539 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:10:10.854505 1488539 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:10:10.854616 1488539 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:10:10.854635 1488539 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:10:10.854694 1488539 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:10:10.854707 1488539 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:10:10.854759 1488539 kubeadm.go:310] OS: Linux
	I0916 11:10:10.854777 1488539 command_runner.go:130] > OS: Linux
	I0916 11:10:10.854835 1488539 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:10:10.854844 1488539 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:10:10.854891 1488539 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:10:10.854899 1488539 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:10:10.854945 1488539 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:10:10.854952 1488539 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:10:10.855000 1488539 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:10:10.855007 1488539 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:10:10.855054 1488539 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:10:10.855061 1488539 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:10:10.855122 1488539 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:10:10.855145 1488539 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:10:10.855190 1488539 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:10:10.855199 1488539 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:10:10.855246 1488539 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:10:10.855254 1488539 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:10:10.855319 1488539 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:10:10.855325 1488539 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:10:10.855401 1488539 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:10:10.855406 1488539 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:10:10.855522 1488539 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:10:10.855536 1488539 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:10:10.855635 1488539 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:10:10.855646 1488539 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:10:10.855711 1488539 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:10:10.855754 1488539 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:10:10.858473 1488539 out.go:235]   - Generating certificates and keys ...
	I0916 11:10:10.858574 1488539 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0916 11:10:10.858585 1488539 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:10:10.858656 1488539 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0916 11:10:10.858665 1488539 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:10:10.858730 1488539 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:10:10.858738 1488539 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:10:10.858794 1488539 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:10:10.858802 1488539 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:10:10.858862 1488539 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0916 11:10:10.858869 1488539 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:10:10.858919 1488539 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0916 11:10:10.858927 1488539 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:10:10.858979 1488539 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0916 11:10:10.858986 1488539 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:10:10.859110 1488539 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859122 1488539 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859173 1488539 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0916 11:10:10.859180 1488539 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:10:10.859299 1488539 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859306 1488539 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-654612] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0916 11:10:10.859370 1488539 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:10:10.859377 1488539 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:10:10.859440 1488539 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:10:10.859448 1488539 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:10:10.859491 1488539 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0916 11:10:10.859499 1488539 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:10:10.859553 1488539 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:10:10.859561 1488539 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:10:10.859612 1488539 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:10:10.859623 1488539 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:10:10.859680 1488539 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:10:10.859687 1488539 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:10:10.859740 1488539 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:10:10.859749 1488539 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:10:10.859810 1488539 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:10:10.859820 1488539 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:10:10.859886 1488539 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:10:10.859911 1488539 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:10:10.859991 1488539 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:10:10.859998 1488539 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:10:10.860063 1488539 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:10:10.860070 1488539 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:10:10.862753 1488539 out.go:235]   - Booting up control plane ...
	I0916 11:10:10.862923 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:10:10.862935 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:10:10.863063 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:10:10.863073 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:10:10.863141 1488539 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:10:10.863150 1488539 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:10:10.863257 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:10:10.863267 1488539 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:10:10.863403 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:10:10.863413 1488539 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:10:10.863452 1488539 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:10:10.863459 1488539 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:10:10.863625 1488539 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:10:10.863635 1488539 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:10:10.863736 1488539 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:10:10.863746 1488539 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:10:10.863808 1488539 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 2.001682005s
	I0916 11:10:10.863818 1488539 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 2.001682005s
	I0916 11:10:10.863896 1488539 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:10:10.863907 1488539 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:10:10.863970 1488539 command_runner.go:130] > [api-check] The API server is healthy after 6.001409713s
	I0916 11:10:10.863978 1488539 kubeadm.go:310] [api-check] The API server is healthy after 6.001409713s
	I0916 11:10:10.864108 1488539 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:10:10.864132 1488539 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:10:10.864257 1488539 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:10:10.864262 1488539 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:10:10.864319 1488539 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:10:10.864323 1488539 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:10:10.864510 1488539 command_runner.go:130] > [mark-control-plane] Marking the node multinode-654612 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:10:10.864516 1488539 kubeadm.go:310] [mark-control-plane] Marking the node multinode-654612 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:10:10.864571 1488539 command_runner.go:130] > [bootstrap-token] Using token: b40s63.zibnp7p33t2buer0
	I0916 11:10:10.864576 1488539 kubeadm.go:310] [bootstrap-token] Using token: b40s63.zibnp7p33t2buer0
	I0916 11:10:10.868852 1488539 out.go:235]   - Configuring RBAC rules ...
	I0916 11:10:10.868981 1488539 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:10:10.868995 1488539 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:10:10.869116 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:10:10.869121 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:10:10.869321 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:10:10.869332 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:10:10.869481 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:10:10.869512 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:10:10.869655 1488539 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:10:10.869675 1488539 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:10:10.869762 1488539 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:10:10.869772 1488539 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:10:10.869889 1488539 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:10:10.869899 1488539 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:10:10.869943 1488539 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:10:10.869950 1488539 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0916 11:10:10.869995 1488539 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:10:10.870002 1488539 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0916 11:10:10.870006 1488539 kubeadm.go:310] 
	I0916 11:10:10.870066 1488539 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:10:10.870072 1488539 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0916 11:10:10.870077 1488539 kubeadm.go:310] 
	I0916 11:10:10.870152 1488539 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:10:10.870159 1488539 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0916 11:10:10.870164 1488539 kubeadm.go:310] 
	I0916 11:10:10.870189 1488539 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:10:10.870196 1488539 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0916 11:10:10.870253 1488539 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:10:10.870260 1488539 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:10:10.870310 1488539 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:10:10.870317 1488539 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:10:10.870321 1488539 kubeadm.go:310] 
	I0916 11:10:10.870375 1488539 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:10:10.870382 1488539 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0916 11:10:10.870387 1488539 kubeadm.go:310] 
	I0916 11:10:10.870434 1488539 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:10:10.870440 1488539 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:10:10.870445 1488539 kubeadm.go:310] 
	I0916 11:10:10.870496 1488539 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:10:10.870503 1488539 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0916 11:10:10.870576 1488539 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:10:10.870583 1488539 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:10:10.870654 1488539 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:10:10.870661 1488539 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:10:10.870664 1488539 kubeadm.go:310] 
	I0916 11:10:10.870747 1488539 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:10:10.870756 1488539 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:10:10.870831 1488539 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:10:10.870838 1488539 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0916 11:10:10.870842 1488539 kubeadm.go:310] 
	I0916 11:10:10.870924 1488539 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.870931 1488539 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871032 1488539 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:10:10.871038 1488539 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:10:10.871058 1488539 kubeadm.go:310] 	--control-plane 
	I0916 11:10:10.871067 1488539 command_runner.go:130] > 	--control-plane 
	I0916 11:10:10.871071 1488539 kubeadm.go:310] 
	I0916 11:10:10.871155 1488539 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:10:10.871162 1488539 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:10:10.871166 1488539 kubeadm.go:310] 
	I0916 11:10:10.871247 1488539 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871253 1488539 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token b40s63.zibnp7p33t2buer0 \
	I0916 11:10:10.871354 1488539 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:10:10.871360 1488539 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:10:10.871382 1488539 cni.go:84] Creating CNI manager for ""
	I0916 11:10:10.871390 1488539 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:10:10.875799 1488539 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:10:10.878343 1488539 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:10:10.882115 1488539 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0916 11:10:10.882163 1488539 command_runner.go:130] >   Size: 4030506   	Blocks: 7880       IO Block: 4096   regular file
	I0916 11:10:10.882174 1488539 command_runner.go:130] > Device: 36h/54d	Inode: 1574378     Links: 1
	I0916 11:10:10.882181 1488539 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:10:10.882187 1488539 command_runner.go:130] > Access: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:10:10.882192 1488539 command_runner.go:130] > Modify: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:10:10.882197 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.752507709 +0000
	I0916 11:10:10.882201 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.704509024 +0000
	I0916 11:10:10.882316 1488539 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:10:10.882324 1488539 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:10:10.902078 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:10:11.153612 1488539 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0916 11:10:11.164125 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0916 11:10:11.173732 1488539 command_runner.go:130] > serviceaccount/kindnet created
	I0916 11:10:11.188063 1488539 command_runner.go:130] > daemonset.apps/kindnet created
	I0916 11:10:11.191784 1488539 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:10:11.191917 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.192009 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-654612 minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-654612 minikube.k8s.io/primary=true
	I0916 11:10:11.205670 1488539 command_runner.go:130] > -16
	I0916 11:10:11.205950 1488539 ops.go:34] apiserver oom_adj: -16
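The oom_adj probe above confirms the kube-apiserver is shielded from the OOM killer (a score adjustment of -16). A small Go sketch of the same check, assuming pgrep matches exactly one process:

// Sketch: read the apiserver's OOM score adjustment, equivalent to
// "cat /proc/$(pgrep kube-apiserver)/oom_adj" in the log above.
package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

func main() {
	pid, err := exec.Command("pgrep", "kube-apiserver").Output()
	if err != nil {
		panic(err)
	}
	// Assumes a single match; multiple PIDs would need splitting.
	data, err := os.ReadFile(fmt.Sprintf("/proc/%s/oom_adj", strings.TrimSpace(string(pid))))
	if err != nil {
		panic(err)
	}
	fmt.Println("apiserver oom_adj:", strings.TrimSpace(string(data))) // expect a negative value such as -16
}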
	I0916 11:10:11.325674 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0916 11:10:11.330218 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.335730 1488539 command_runner.go:130] > node/multinode-654612 labeled
	I0916 11:10:11.453794 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:11.830372 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:11.920614 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:12.331316 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:12.416556 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:12.831139 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:12.914763 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:13.330338 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:13.426637 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:13.830893 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:13.917716 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:14.330987 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:14.461637 1488539 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:10:14.830246 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:10:14.929204 1488539 command_runner.go:130] > NAME      SECRETS   AGE
	I0916 11:10:14.929233 1488539 command_runner.go:130] > default   0         0s
	I0916 11:10:14.932893 1488539 kubeadm.go:1113] duration metric: took 3.741020617s to wait for elevateKubeSystemPrivileges
	I0916 11:10:14.932920 1488539 kubeadm.go:394] duration metric: took 22.138566358s to StartCluster
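The repeated NotFound errors above are expected: minikube polls `kubectl get sa default` until the token controller creates the "default" ServiceAccount (about 3.7s here, per the duration metric). A sketch of that poll loop, with an assumed 2-minute timeout and the 500ms interval visible in the timestamps:

// Sketch: wait for the "default" ServiceAccount by retrying kubectl,
// as the loop above does. The timeout value is an assumption.
package main

import (
	"fmt"
	"os/exec"
	"time"
)

func main() {
	deadline := time.Now().Add(2 * time.Minute)
	for {
		err := exec.Command("kubectl", "--kubeconfig", "/var/lib/minikube/kubeconfig",
			"get", "sa", "default").Run()
		if err == nil {
			fmt.Println("default ServiceAccount exists")
			return
		}
		if time.Now().After(deadline) {
			panic("timed out waiting for default ServiceAccount")
		}
		time.Sleep(500 * time.Millisecond)
	}
}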
	I0916 11:10:14.932938 1488539 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:10:14.933003 1488539 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:14.933675 1488539 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:10:14.933861 1488539 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:10:14.934025 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:10:14.934265 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:10:14.934298 1488539 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:10:14.934358 1488539 addons.go:69] Setting storage-provisioner=true in profile "multinode-654612"
	I0916 11:10:14.934373 1488539 addons.go:234] Setting addon storage-provisioner=true in "multinode-654612"
	I0916 11:10:14.934396 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:10:14.934899 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.935368 1488539 addons.go:69] Setting default-storageclass=true in profile "multinode-654612"
	I0916 11:10:14.935397 1488539 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-654612"
	I0916 11:10:14.935714 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.937810 1488539 out.go:177] * Verifying Kubernetes components...
	I0916 11:10:14.942180 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:10:14.982007 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:14.982298 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:14.982887 1488539 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:10:14.983143 1488539 addons.go:234] Setting addon default-storageclass=true in "multinode-654612"
	I0916 11:10:14.983177 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:10:14.983606 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:10:14.991099 1488539 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:10:14.994064 1488539 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:10:14.994089 1488539 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:10:14.994163 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:10:15.010146 1488539 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:10:15.010178 1488539 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:10:15.010259 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:10:15.039435 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:10:15.056948 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:10:15.216075 1488539 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:10:15.282816 1488539 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:10:15.336866 1488539 command_runner.go:130] > apiVersion: v1
	I0916 11:10:15.336939 1488539 command_runner.go:130] > data:
	I0916 11:10:15.336974 1488539 command_runner.go:130] >   Corefile: |
	I0916 11:10:15.336996 1488539 command_runner.go:130] >     .:53 {
	I0916 11:10:15.337016 1488539 command_runner.go:130] >         errors
	I0916 11:10:15.337051 1488539 command_runner.go:130] >         health {
	I0916 11:10:15.337073 1488539 command_runner.go:130] >            lameduck 5s
	I0916 11:10:15.337089 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337109 1488539 command_runner.go:130] >         ready
	I0916 11:10:15.337148 1488539 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0916 11:10:15.337170 1488539 command_runner.go:130] >            pods insecure
	I0916 11:10:15.337189 1488539 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0916 11:10:15.337222 1488539 command_runner.go:130] >            ttl 30
	I0916 11:10:15.337247 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337268 1488539 command_runner.go:130] >         prometheus :9153
	I0916 11:10:15.337301 1488539 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0916 11:10:15.337322 1488539 command_runner.go:130] >            max_concurrent 1000
	I0916 11:10:15.337338 1488539 command_runner.go:130] >         }
	I0916 11:10:15.337357 1488539 command_runner.go:130] >         cache 30
	I0916 11:10:15.337387 1488539 command_runner.go:130] >         loop
	I0916 11:10:15.337410 1488539 command_runner.go:130] >         reload
	I0916 11:10:15.337467 1488539 command_runner.go:130] >         loadbalance
	I0916 11:10:15.337492 1488539 command_runner.go:130] >     }
	I0916 11:10:15.337510 1488539 command_runner.go:130] > kind: ConfigMap
	I0916 11:10:15.337541 1488539 command_runner.go:130] > metadata:
	I0916 11:10:15.337568 1488539 command_runner.go:130] >   creationTimestamp: "2024-09-16T11:10:10Z"
	I0916 11:10:15.337588 1488539 command_runner.go:130] >   name: coredns
	I0916 11:10:15.337620 1488539 command_runner.go:130] >   namespace: kube-system
	I0916 11:10:15.337644 1488539 command_runner.go:130] >   resourceVersion: "229"
	I0916 11:10:15.337663 1488539 command_runner.go:130] >   uid: 0328fa4b-2cc5-463c-941b-8ca226ae16f8
	I0916 11:10:15.341864 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.67.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
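The sed pipeline above rewrites the Corefile printed earlier: it inserts a hosts{} stanza that resolves host.minikube.internal to the gateway IP ahead of the forward block, adds a log directive ahead of errors, and pipes the result to `kubectl replace`. A Go sketch of just the string edit, applied to an abridged Corefile:

// Sketch: the Corefile edit performed by the sed pipeline above,
// done with plain string replacement on an abridged Corefile.
package main

import (
	"fmt"
	"strings"
)

func main() {
	corefile := ".:53 {\n        errors\n        forward . /etc/resolv.conf {\n           max_concurrent 1000\n        }\n}" // abridged; full Corefile is in the log above

	hosts := "        hosts {\n           192.168.67.1 host.minikube.internal\n           fallthrough\n        }\n"
	// Insert the hosts block before the forward stanza...
	corefile = strings.Replace(corefile, "        forward .", hosts+"        forward .", 1)
	// ...and a log directive before errors.
	corefile = strings.Replace(corefile, "        errors", "        log\n        errors", 1)
	fmt.Println(corefile)
}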
	I0916 11:10:15.341998 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:10:15.894297 1488539 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0916 11:10:15.904577 1488539 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0916 11:10:15.915984 1488539 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:10:15.925831 1488539 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:10:15.934179 1488539 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0916 11:10:15.945567 1488539 command_runner.go:130] > pod/storage-provisioner created
	I0916 11:10:15.950869 1488539 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0916 11:10:15.950940 1488539 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:10:15.950955 1488539 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:10:15.951046 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 11:10:15.951052 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.951060 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.951063 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.951263 1488539 command_runner.go:130] > configmap/coredns replaced
	I0916 11:10:15.951283 1488539 start.go:971] {"host.minikube.internal": 192.168.67.1} host record injected into CoreDNS's ConfigMap
	I0916 11:10:15.951686 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:15.951943 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:15.952252 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:15.952260 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.952268 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.952273 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.952922 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:10:15.953282 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:15.953603 1488539 node_ready.go:35] waiting up to 6m0s for node "multinode-654612" to be "Ready" ...
	I0916 11:10:15.953718 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:15.953753 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.953776 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.953794 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.986653 1488539 round_trippers.go:574] Response Status: 200 OK in 32 milliseconds
	I0916 11:10:15.986676 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.986685 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.986689 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.986694 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.986697 1488539 round_trippers.go:580]     Audit-Id: 9e3a4fce-4c3f-4ee7-b99a-11967743a7b9
	I0916 11:10:15.986700 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.986703 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.986898 1488539 round_trippers.go:574] Response Status: 200 OK in 34 milliseconds
	I0916 11:10:15.986928 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.986938 1488539 round_trippers.go:580]     Audit-Id: 772c45c3-c98f-43c3-aef5-dab4ae1bfe33
	I0916 11:10:15.986942 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.986945 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.986948 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.986951 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.986955 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:15.986958 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.986980 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"348","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":2},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:15.987027 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:15.987418 1488539 request.go:1351] Request Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"348","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:15.987477 1488539 round_trippers.go:463] PUT https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:15.987488 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.987497 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.987505 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.987509 1488539 round_trippers.go:473]     Content-Type: application/json
	I0916 11:10:15.989002 1488539 round_trippers.go:574] Response Status: 200 OK in 37 milliseconds
	I0916 11:10:15.989022 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.989029 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.989033 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.989036 1488539 round_trippers.go:580]     Content-Length: 1273
	I0916 11:10:15.989039 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.989042 1488539 round_trippers.go:580]     Audit-Id: 42c79c11-4fd7-43ef-b1ad-fb91fa16b6c4
	I0916 11:10:15.989045 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.989056 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.989118 1488539 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"356"},"items":[{"metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 11:10:15.989474 1488539 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:10:15.989519 1488539 round_trippers.go:463] PUT https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 11:10:15.989525 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:15.989533 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.989538 1488539 round_trippers.go:473]     Content-Type: application/json
	I0916 11:10:15.989541 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.993468 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:15.993488 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:15.993529 1488539 round_trippers.go:580]     Audit-Id: f2821cf8-2c2a-4b57-81bb-17f287d257d6
	I0916 11:10:15.993535 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.993538 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.993541 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:15.993543 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:15.993546 1488539 round_trippers.go:580]     Content-Length: 1220
	I0916 11:10:15.993549 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.993582 1488539 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"824e6101-7039-44b4-b417-59d2cf58814a","resourceVersion":"332","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:10:15.997988 1488539 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:10:16.000565 1488539 addons.go:510] duration metric: took 1.066255785s for enable addons: enabled=[storage-provisioner default-storageclass]
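The storageclasses PUT above round-trips the "standard" StorageClass with its storageclass.kubernetes.io/is-default-class: "true" annotation, which is the well-known marker that makes the default-storageclass addon take effect. A sketch of setting that annotation with client-go (the object and namespace names follow the log; the helper itself is illustrative):

    package sketch

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // MarkDefault sets the well-known default-class annotation that the
    // PUT above is preserving on the "standard" StorageClass.
    func MarkDefault(ctx context.Context, cs *kubernetes.Clientset) error {
        sc, err := cs.StorageV1().StorageClasses().Get(ctx, "standard", metav1.GetOptions{})
        if err != nil {
            return err
        }
        if sc.Annotations == nil {
            sc.Annotations = map[string]string{}
        }
        sc.Annotations["storageclass.kubernetes.io/is-default-class"] = "true"
        _, err = cs.StorageV1().StorageClasses().Update(ctx, sc, metav1.UpdateOptions{})
        return err
    }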
	I0916 11:10:16.001134 1488539 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 11:10:16.001159 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.001168 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:16.001172 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.001174 1488539 round_trippers.go:580]     Audit-Id: 6a8e0e97-01ec-42cc-bd8d-fa8cb222517d
	I0916 11:10:16.001177 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.001180 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.001183 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.001185 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.001213 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"357","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:16.452910 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:10:16.452938 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.452948 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.452952 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.454229 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:16.454250 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.454259 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.454263 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.455335 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.455360 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.455368 1488539 round_trippers.go:580]     Content-Length: 291
	I0916 11:10:16.455372 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.455376 1488539 round_trippers.go:580]     Audit-Id: 5d43ac4c-8fd6-40a7-a385-8096fc959cb8
	I0916 11:10:16.455379 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.455382 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.455385 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.455388 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.459002 1488539 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"b57d8e9c-a4d8-4110-8ca0-89835edf91fa","resourceVersion":"370","creationTimestamp":"2024-09-16T11:10:10Z"},"spec":{"replicas":1},"status":{"replicas":1,"selector":"k8s-app=kube-dns"}}
	I0916 11:10:16.459118 1488539 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-654612" context rescaled to 1 replicas
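The kapi.go line above summarizes the GET/PUT pair against .../deployments/coredns/scale: read the autoscaling/v1 Scale subresource, set spec.replicas to 1, and write it back. Note that the intermediate response at 11:10:16.001 shows spec.replicas 1 while status.replicas is still 2; by the 11:10:16.459 read the status has caught up. A client-go sketch of the same subresource round-trip (the function name is illustrative):

    package sketch

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // RescaleCoreDNS mirrors the GET/PUT pair on the coredns scale
    // subresource shown above: read Scale, set spec.replicas, write back.
    func RescaleCoreDNS(ctx context.Context, cs *kubernetes.Clientset, replicas int32) error {
        scale, err := cs.AppsV1().Deployments("kube-system").GetScale(ctx, "coredns", metav1.GetOptions{})
        if err != nil {
            return err
        }
        scale.Spec.Replicas = replicas
        _, err = cs.AppsV1().Deployments("kube-system").UpdateScale(ctx, "coredns", scale, metav1.UpdateOptions{})
        return err
    }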
	I0916 11:10:16.463388 1488539 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:10:16.463409 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.463417 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.463422 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.463426 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.463430 1488539 round_trippers.go:580]     Audit-Id: 0218f56f-e80a-48de-b984-ca95fd447639
	I0916 11:10:16.463433 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.463436 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.464022 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:16.954221 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:16.954245 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:16.954254 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.954258 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.956643 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.956752 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:16.956778 1488539 round_trippers.go:580]     Audit-Id: a08dc693-1630-468a-9c24-9b04c1268c2f
	I0916 11:10:16.956796 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.956830 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.956853 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:16.956872 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:16.956889 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.957042 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.454418 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:17.454444 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:17.454453 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.454460 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.456531 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.456558 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:17.456567 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:17.456572 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.456577 1488539 round_trippers.go:580]     Audit-Id: de55e2e4-5a2b-4c91-800e-25dff7c49dd0
	I0916 11:10:17.456579 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.456582 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.456585 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:17.456879 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.953920 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:17.953946 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:17.953960 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.953964 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.956789 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.956816 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:17.956824 1488539 round_trippers.go:580]     Audit-Id: 0ec9fcd8-8ee1-4a01-a556-ea935e3cae51
	I0916 11:10:17.956829 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.956834 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.956837 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:17.956840 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:17.956843 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.957410 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:17.957851 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
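From here the log settles into node_ready.go's poll loop: a GET on /api/v1/nodes/multinode-654612 roughly every half second, each cycle ending in a has status "Ready":"False" line until kubelet flips the condition. A sketch of such a readiness poll using client-go with apimachinery's wait helpers (an illustration under those assumptions, not minikube's node_ready.go itself):

    package sketch

    import (
        "context"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
    )

    // WaitNodeReady polls the node object on a fixed interval, matching
    // the ~500ms GET cadence visible above, until Ready is True.
    func WaitNodeReady(ctx context.Context, cs *kubernetes.Clientset, name string, timeout time.Duration) error {
        return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
            func(ctx context.Context) (bool, error) {
                node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
                if err != nil {
                    return false, err
                }
                for _, c := range node.Status.Conditions {
                    if c.Type == corev1.NodeReady {
                        return c.Status == corev1.ConditionTrue, nil
                    }
                }
                return false, nil
            })
    }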
	I0916 11:10:18.454209 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:18.454234 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:18.454244 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.454249 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.456484 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.456506 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:18.456516 1488539 round_trippers.go:580]     Audit-Id: aaeb0cf1-23c4-44fa-87ba-7b762493099f
	I0916 11:10:18.456521 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.456525 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.456527 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:18.456530 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:18.456533 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.456723 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:18.953870 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:18.953897 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:18.953907 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.953914 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.956511 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.956537 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:18.956548 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:18.956554 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.956557 1488539 round_trippers.go:580]     Audit-Id: eb4746f8-fc2d-4e60-a90f-8db6804ccdfa
	I0916 11:10:18.956560 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.956563 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.956601 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:18.957195 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.454444 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:19.454526 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:19.454541 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.454545 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.457018 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.457041 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:19.457049 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:19.457053 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:19.457057 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.457061 1488539 round_trippers.go:580]     Audit-Id: 75e5924f-5e80-421e-8c03-0e7c8d5e1999
	I0916 11:10:19.457064 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.457066 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.457273 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.954857 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:19.954884 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:19.954894 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.954900 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.957120 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.957143 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:19.957152 1488539 round_trippers.go:580]     Audit-Id: 7819e35d-efd6-4772-bcf8-b67acc5e845f
	I0916 11:10:19.957183 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.957195 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.957199 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:19.957202 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:19.957208 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.957456 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:19.957885 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:20.454664 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:20.454699 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:20.454708 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.454779 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.457204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.457229 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:20.457238 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.457244 1488539 round_trippers.go:580]     Audit-Id: 73a4dccc-dd39-4644-831b-8b526d28bea4
	I0916 11:10:20.457247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.457250 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.457253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:20.457256 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:20.457949 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:20.954019 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:20.954042 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:20.954052 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.954057 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.956589 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.956664 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:20.956709 1488539 round_trippers.go:580]     Audit-Id: a9aecfc3-86c9-4513-9fba-891a05e4f329
	I0916 11:10:20.956729 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.956746 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.956764 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:20.956796 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:20.956814 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.956960 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:21.454622 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:21.454649 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:21.454657 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.454661 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.456893 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.456922 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:21.456932 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.456936 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:21.456939 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:21.456948 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.456951 1488539 round_trippers.go:580]     Audit-Id: 28eecb8a-5628-45ac-9b56-8267a72e6ddb
	I0916 11:10:21.456954 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.457213 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:21.953902 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:21.953925 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:21.953952 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.953957 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.956189 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.956216 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:21.956226 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.956230 1488539 round_trippers.go:580]     Audit-Id: 82cbcf3e-7b6b-4014-82ec-5ea14697ef6f
	I0916 11:10:21.956234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.956237 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.956240 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:21.956245 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:21.956577 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:22.453921 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:22.453949 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:22.453959 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.453965 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.456257 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.456289 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:22.456298 1488539 round_trippers.go:580]     Audit-Id: 4628a911-12f8-4994-9189-bfc4438a271c
	I0916 11:10:22.456302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.456307 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.456313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:22.456319 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:22.456322 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.456533 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:22.457033 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:22.953892 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:22.953919 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:22.953934 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.953940 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.956452 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.956477 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:22.956486 1488539 round_trippers.go:580]     Audit-Id: 60290daf-b57e-4d57-9d58-69549d9cc092
	I0916 11:10:22.956490 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.956492 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.956495 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:22.956498 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:22.956504 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.956688 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:23.454765 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:23.454792 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:23.454802 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.454806 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.457387 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.457422 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:23.457431 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:23.457437 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:23.457441 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.457445 1488539 round_trippers.go:580]     Audit-Id: f50118d4-4ef1-47a2-a00f-fb0147e0a79b
	I0916 11:10:23.457448 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.457451 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.457680 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:23.954750 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:23.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:23.954788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.954792 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.957410 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.957436 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:23.957445 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.957451 1488539 round_trippers.go:580]     Audit-Id: 9e6cdacf-6b86-4c8e-a9e9-f8f3c8786206
	I0916 11:10:23.957455 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.957458 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.957466 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:23.957470 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:23.957645 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.453867 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:24.453895 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:24.453905 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.453911 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.456090 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.456112 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:24.456121 1488539 round_trippers.go:580]     Audit-Id: 7982c331-9512-43dc-82f4-1937e1dcd231
	I0916 11:10:24.456125 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.456127 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.456130 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:24.456139 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:24.456142 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.456499 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.953996 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:24.954022 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:24.954035 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.954043 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.956266 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.956292 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:24.956301 1488539 round_trippers.go:580]     Audit-Id: 099a6a58-ad29-4990-9301-42bc5be58cf7
	I0916 11:10:24.956306 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.956310 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.956313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:24.956316 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:24.956319 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.956727 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:24.957146 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:25.453976 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:25.453998 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:25.454008 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.454013 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.456720 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.456745 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:25.456755 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:25.456758 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:25.456763 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.456766 1488539 round_trippers.go:580]     Audit-Id: 3f6bb556-4d99-4bd2-9039-c81c4c188c6f
	I0916 11:10:25.456768 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.456771 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.457092 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:25.954130 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:25.954157 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:25.954167 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.954173 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.956441 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.956464 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:25.956473 1488539 round_trippers.go:580]     Audit-Id: 635af718-7cf0-46dc-92b4-8f200ba9346a
	I0916 11:10:25.956478 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.956482 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.956484 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:25.956487 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:25.956490 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.956643 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.454769 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:26.454795 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:26.454805 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.454812 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.457242 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.457266 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:26.457274 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.457278 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:26.457281 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:26.457284 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.457287 1488539 round_trippers.go:580]     Audit-Id: 38a39ce6-fa7d-4cba-a1f1-77d5b9529a79
	I0916 11:10:26.457289 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.457461 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.953874 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:26.953905 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:26.953915 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.953919 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.956593 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.956616 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:26.956625 1488539 round_trippers.go:580]     Audit-Id: f22cff62-e9f0-4c41-b3ed-1d650690e546
	I0916 11:10:26.956631 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.956634 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.956636 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:26.956639 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:26.956643 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.957034 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:26.957443 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:27.454436 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:27.454463 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:27.454472 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.454476 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.456707 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.456731 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:27.456739 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.456744 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.456747 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:27.456751 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:27.456753 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.456756 1488539 round_trippers.go:580]     Audit-Id: 83625c4a-ddd0-4eeb-a19b-721f0adc4041
	I0916 11:10:27.456938 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:27.954777 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:27.954804 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:27.954814 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.954826 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.957246 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.957273 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:27.957282 1488539 round_trippers.go:580]     Audit-Id: b2e14cc4-3d7f-4812-9b03-a61a349d7c1a
	I0916 11:10:27.957287 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.957291 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.957296 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:27.957299 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:27.957302 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.957475 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:28.454655 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:28.454681 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:28.454691 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.454696 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.456804 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.456834 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:28.456849 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.456853 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.456856 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:28.456862 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:28.456865 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.456868 1488539 round_trippers.go:580]     Audit-Id: 15f64c0f-3941-43d2-a355-7fb0b63ae448
	I0916 11:10:28.457031 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:28.954254 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:28.954282 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:28.954294 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.954298 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.956564 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.956585 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:28.956594 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:28.956599 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.956604 1488539 round_trippers.go:580]     Audit-Id: 94f05532-f47d-43e3-94c3-a1107b545001
	I0916 11:10:28.956608 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.956610 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.956613 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:28.956750 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:29.453829 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:29.453855 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:29.453865 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.453871 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.456158 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.456176 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:29.456184 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:29.456189 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:29.456193 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.456195 1488539 round_trippers.go:580]     Audit-Id: 8756fcb0-0726-4c33-b8c7-3691031c3fd6
	I0916 11:10:29.456198 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.456201 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.456315 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:29.456747 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:29.954755 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:29.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:29.954788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.954792 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.957084 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.957109 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:29.957117 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:29.957123 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.957127 1488539 round_trippers.go:580]     Audit-Id: 5864a457-0382-4a0f-86f6-4cbc2cd499aa
	I0916 11:10:29.957130 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.957133 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.957135 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:29.957282 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:30.453900 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:30.453931 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:30.453940 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.453943 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.456268 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.456290 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:30.456297 1488539 round_trippers.go:580]     Audit-Id: 8dc6a3d4-07b5-42ed-869e-3ec3585a0519
	I0916 11:10:30.456302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.456306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.456309 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:30.456313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:30.456316 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.456478 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:30.953858 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:30.953886 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:30.953897 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.953901 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.956294 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.956327 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:30.956337 1488539 round_trippers.go:580]     Audit-Id: f8bcd0cb-6149-499f-bc23-c5dd1661fd48
	I0916 11:10:30.956341 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.956345 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.956347 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:30.956350 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:30.956354 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.956446 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:31.453915 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:31.453944 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:31.453953 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.453962 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.456752 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.456777 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:31.456785 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.456789 1488539 round_trippers.go:580]     Audit-Id: b2a2d11b-0c49-46d6-bbcf-5df4219fb1a7
	I0916 11:10:31.456793 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.456796 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.456799 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:31.456803 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:31.456907 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:31.457315 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:31.954007 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:31.954032 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:31.954041 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.954046 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.956204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.956225 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:31.956234 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.956240 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:31.956243 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:31.956247 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.956250 1488539 round_trippers.go:580]     Audit-Id: 44a46614-798d-45ac-9930-ced5ca7b9b04
	I0916 11:10:31.956252 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.956349 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:32.454550 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:32.454574 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:32.454585 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.454589 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.456723 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.456743 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:32.456752 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:32.456758 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.456761 1488539 round_trippers.go:580]     Audit-Id: 83523ae0-0b20-4ae0-9588-50ed90df9dbf
	I0916 11:10:32.456763 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.456766 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.456768 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:32.456886 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:32.954750 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:32.954779 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:32.954789 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.954793 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.957167 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.957200 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:32.957210 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:32.957214 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.957219 1488539 round_trippers.go:580]     Audit-Id: 6828c841-94f3-4a64-a205-c1a3a98e2fc3
	I0916 11:10:32.957223 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.957226 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.957229 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:32.957449 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.453826 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:33.453855 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:33.453864 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.453868 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.456083 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.456104 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:33.456112 1488539 round_trippers.go:580]     Audit-Id: 825871bf-2572-4e81-a83a-4b9af44eccbf
	I0916 11:10:33.456117 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.456120 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.456122 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:33.456125 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:33.456128 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.456248 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.954551 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:33.954575 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:33.954584 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.954588 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.956833 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.956860 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:33.956870 1488539 round_trippers.go:580]     Audit-Id: 32cb9e28-36ce-4705-a8c6-a8f111a0d358
	I0916 11:10:33.956876 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.956879 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.956881 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:33.956885 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:33.956888 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.957141 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:33.957589 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:34.453846 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:34.453873 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:34.453881 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.453886 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.456204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.456226 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:34.456234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.456239 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.456244 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:34.456248 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:34.456252 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.456255 1488539 round_trippers.go:580]     Audit-Id: 7d8dc698-149f-497a-ba4c-cbf5fecfe310
	I0916 11:10:34.456586 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:34.954479 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:34.954506 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:34.954513 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.954518 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.956971 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.956993 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:34.957001 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:34.957006 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.957012 1488539 round_trippers.go:580]     Audit-Id: d2ab8967-9264-49b7-baf4-3b8f264fcbed
	I0916 11:10:34.957015 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.957018 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.957021 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:34.957182 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:35.454397 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:35.454426 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:35.454435 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.454441 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.456636 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.456660 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:35.456670 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:35.456690 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:35.456695 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.456699 1488539 round_trippers.go:580]     Audit-Id: 452ecf05-a7a2-4831-b9a3-f685239a10cf
	I0916 11:10:35.456703 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.456707 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.457070 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:35.953944 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:35.953971 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:35.953980 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.953985 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.956192 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.956213 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:35.956222 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:35.956226 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.956230 1488539 round_trippers.go:580]     Audit-Id: f5745e87-a54a-46e1-928f-dc8350a54a11
	I0916 11:10:35.956234 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.956236 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.956239 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:35.956498 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:36.454192 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:36.454220 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:36.454229 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.454233 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.456427 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.456455 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:36.456464 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.456468 1488539 round_trippers.go:580]     Audit-Id: aecd9d72-ac8e-4750-a552-3e6018dad1b8
	I0916 11:10:36.456471 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.456474 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.456476 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:36.456511 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:36.456761 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:36.457201 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
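	(The cycle above is minikube's node-readiness wait from node_ready.go: roughly every 500ms it issues GET /api/v1/nodes/multinode-654612 and checks the node's Ready condition, logging "Ready":"False" until the kubelet reports otherwise. As a minimal sketch of the same poll, assuming client-go, a default kubeconfig, and the node name from this log; this is illustrative, not minikube's actual implementation:

		// readiness_poll.go: poll a node until its Ready condition is True.
		// Illustrative only; the node name, interval, and kubeconfig path are assumptions.
		package main

		import (
			"context"
			"fmt"
			"time"

			corev1 "k8s.io/api/core/v1"
			metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
			"k8s.io/client-go/kubernetes"
			"k8s.io/client-go/tools/clientcmd"
		)

		func main() {
			// Load credentials from the default kubeconfig (~/.kube/config).
			cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
			if err != nil {
				panic(err)
			}
			client := kubernetes.NewForConfigOrDie(cfg)

			for {
				// Same request the log records: GET /api/v1/nodes/<name>.
				node, err := client.CoreV1().Nodes().Get(context.TODO(), "multinode-654612", metav1.GetOptions{})
				if err != nil {
					panic(err)
				}
				// Scan the status conditions for NodeReady == True.
				for _, c := range node.Status.Conditions {
					if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
						fmt.Println("node is Ready")
						return
					}
				}
				fmt.Println(`node has status "Ready":"False"; retrying`)
				time.Sleep(500 * time.Millisecond)
			}
		}
	)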
	I0916 11:10:36.954113 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:36.954166 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:36.954177 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.954183 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.956739 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.956769 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:36.956778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:36.956783 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:36.956786 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.956789 1488539 round_trippers.go:580]     Audit-Id: f605be91-7b12-4259-901e-da485de36443
	I0916 11:10:36.956792 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.956795 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.957208 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:37.454772 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:37.454799 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:37.454809 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.454813 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.457033 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.457059 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:37.457068 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.457073 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:37.457076 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:37.457080 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.457085 1488539 round_trippers.go:580]     Audit-Id: 588dec0a-70e7-4cd3-9836-3fe9dac07d3a
	I0916 11:10:37.457089 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.457365 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:37.954609 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:37.954633 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:37.954648 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.954652 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.956881 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.956902 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:37.956911 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.956916 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.956919 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:37.956924 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:37.956928 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.956931 1488539 round_trippers.go:580]     Audit-Id: 8b114895-b061-4441-b3f3-3c26f979c752
	I0916 11:10:37.957164 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:38.454518 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:38.454547 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:38.454556 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.454562 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.456844 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.456869 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:38.456879 1488539 round_trippers.go:580]     Audit-Id: 5e33d479-1bb8-4e8f-ad9b-645da49502f1
	I0916 11:10:38.456883 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.456887 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.456890 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:38.456894 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:38.456898 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.457254 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:38.457715 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:38.954451 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:38.954473 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:38.954482 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.954486 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.956796 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.956822 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:38.956829 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:38.956834 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:38.956838 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.956842 1488539 round_trippers.go:580]     Audit-Id: cf44a1ae-9735-4261-bb6c-36c582320b3f
	I0916 11:10:38.956845 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.956847 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.957111 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:39.453861 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:39.453885 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:39.453895 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:39.453899 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:39.456133 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:39.456153 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:39.456161 1488539 round_trippers.go:580]     Audit-Id: b4f84c82-69f2-4cbe-8ba7-ecae49a96241
	I0916 11:10:39.456165 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:39.456168 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:39.456171 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:39.456174 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:39.456177 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:39 GMT
	I0916 11:10:39.456345 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:39.954268 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:39.954295 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:39.954305 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:39.954310 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:39.956537 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:39.956557 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:39.956564 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:39.956568 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:39.956571 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:39.956575 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:39.956578 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:39 GMT
	I0916 11:10:39.956581 1488539 round_trippers.go:580]     Audit-Id: bd8e9144-eb30-4447-a21f-96dc71d7be9a
	I0916 11:10:39.956706 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.454601 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:40.454629 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:40.454638 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:40.454645 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:40.456906 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:40.456933 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:40.456941 1488539 round_trippers.go:580]     Audit-Id: 5de4c2e5-838b-47c8-b053-883675b14f77
	I0916 11:10:40.456946 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:40.456949 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:40.456952 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:40.456955 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:40.456958 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:40 GMT
	I0916 11:10:40.457152 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.954085 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:40.954110 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:40.954120 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:40.954124 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:40.956204 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:40.956233 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:40.956241 1488539 round_trippers.go:580]     Audit-Id: 0d400ef8-dd42-4c22-b85c-28e17835cef6
	I0916 11:10:40.956247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:40.956251 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:40.956253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:40.956257 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:40.956260 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:40 GMT
	I0916 11:10:40.956538 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:40.956961 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:41.454376 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:41.454401 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:41.454410 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:41.454415 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:41.456648 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:41.456668 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:41.456692 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:41.456699 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:41.456705 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:41.456709 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:41 GMT
	I0916 11:10:41.456711 1488539 round_trippers.go:580]     Audit-Id: 31d7865b-c626-49b1-8777-2d59ef2660c6
	I0916 11:10:41.456714 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:41.456867 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:41.954562 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:41.954595 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:41.954606 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:41.954610 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:41.956900 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:41.956926 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:41.956935 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:41 GMT
	I0916 11:10:41.956940 1488539 round_trippers.go:580]     Audit-Id: 2a89490e-2657-4b4b-9841-c065c25ed297
	I0916 11:10:41.956952 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:41.956959 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:41.956963 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:41.956966 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:41.957339 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:42.454494 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:42.454523 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:42.454533 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:42.454539 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:42.456791 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:42.456814 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:42.456823 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:42.456826 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:42.456829 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:42.456831 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:42.456835 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:42 GMT
	I0916 11:10:42.456837 1488539 round_trippers.go:580]     Audit-Id: e7c7ee62-5779-4a3c-80f6-f40491c3d515
	I0916 11:10:42.457073 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:42.954036 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:42.954064 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:42.954074 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:42.954080 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:42.956309 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:42.956331 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:42.956339 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:42 GMT
	I0916 11:10:42.956343 1488539 round_trippers.go:580]     Audit-Id: 4459c841-84e8-4dde-9686-219b2243ca24
	I0916 11:10:42.956346 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:42.956348 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:42.956351 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:42.956355 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:42.956466 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:43.454455 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:43.454485 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:43.454495 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:43.454500 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:43.456666 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:43.456707 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:43.456716 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:43.456721 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:43.456724 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:43.456728 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:43 GMT
	I0916 11:10:43.456733 1488539 round_trippers.go:580]     Audit-Id: 78f05987-9e1a-41bd-92e3-ee1834158a8e
	I0916 11:10:43.456736 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:43.456859 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:43.457261 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:43.953867 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:43.953889 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:43.953899 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:43.953903 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:43.956083 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:43.956103 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:43.956112 1488539 round_trippers.go:580]     Audit-Id: 22dae938-c42b-4542-82cd-7c6df22931f4
	I0916 11:10:43.956116 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:43.956121 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:43.956125 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:43.956128 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:43.956132 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:43 GMT
	I0916 11:10:43.956226 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:44.453937 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:44.453963 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:44.453973 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:44.453979 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:44.462029 1488539 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:10:44.462057 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:44.462066 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:44.462071 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:44.462076 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:44 GMT
	I0916 11:10:44.462079 1488539 round_trippers.go:580]     Audit-Id: 9ea6e98d-a982-4163-a307-da5249fe15bf
	I0916 11:10:44.462082 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:44.462096 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:44.462235 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:44.954698 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:44.954743 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:44.954753 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:44.954759 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:44.957049 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:44.957078 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:44.957087 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:44.957091 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:44.957094 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:44.957096 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:44.957099 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:44 GMT
	I0916 11:10:44.957101 1488539 round_trippers.go:580]     Audit-Id: ff25ec64-5017-4a54-890b-2e595e356223
	I0916 11:10:44.957213 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:45.454479 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:45.454508 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:45.454518 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:45.454524 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:45.456695 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:45.456722 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:45.456732 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:45.456736 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:45.456739 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:45.456742 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:45.456747 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:45 GMT
	I0916 11:10:45.456749 1488539 round_trippers.go:580]     Audit-Id: 693b3b47-85ef-47b6-885f-dd693aed574f
	I0916 11:10:45.456892 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:45.457311 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:45.954043 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:45.954068 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:45.954078 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:45.954084 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:45.957652 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:45.957674 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:45.957683 1488539 round_trippers.go:580]     Audit-Id: b824005c-69c0-4c3a-907f-3f16011a1e5c
	I0916 11:10:45.957689 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:45.957692 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:45.957695 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:45.957698 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:45.957701 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:45 GMT
	I0916 11:10:45.957840 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:46.454664 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:46.454690 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:46.454700 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.454709 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.457016 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.457046 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:46.457054 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:46.457059 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.457064 1488539 round_trippers.go:580]     Audit-Id: fd144db7-5071-4279-ab99-caeed8674348
	I0916 11:10:46.457067 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.457069 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.457072 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:46.457192 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:46.953895 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:46.953918 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:46.953927 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.953933 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.956227 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.956247 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:46.956256 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.956260 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:46.956264 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:46.956267 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.956270 1488539 round_trippers.go:580]     Audit-Id: 779ccb12-dfeb-4a26-866b-bffb37c566fe
	I0916 11:10:46.956273 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.956420 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:47.453922 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:47.453950 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:47.453960 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.453965 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.457353 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:47.457377 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:47.457385 1488539 round_trippers.go:580]     Audit-Id: a28f7485-9256-4d3d-ba7e-be6f7835fcf2
	I0916 11:10:47.457391 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.457396 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.457398 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:47.457401 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:47.457404 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.457568 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:47.457968 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:47.954711 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:47.954731 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:47.954742 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.954746 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.957369 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:47.957419 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:47.957432 1488539 round_trippers.go:580]     Audit-Id: 8163da0b-6b56-4887-87ec-939a1c745304
	I0916 11:10:47.957438 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.957441 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.957444 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:47.957447 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:47.957450 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.957558 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:48.454695 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:48.454723 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:48.454732 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.454737 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.457178 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:48.457212 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:48.457222 1488539 round_trippers.go:580]     Audit-Id: 40eedab5-72d6-40f0-abd7-f7d30f572f63
	I0916 11:10:48.457228 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.457231 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.457234 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:48.457242 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:48.457251 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.457723 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:48.954702 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:48.954730 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:48.954740 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.954746 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.957071 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:48.957100 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:48.957108 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.957113 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.957116 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:48.957119 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:48.957122 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.957125 1488539 round_trippers.go:580]     Audit-Id: 2e34b4b7-8ddb-4406-9782-e438e388f6ed
	I0916 11:10:48.957269 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.454474 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:49.454516 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:49.454531 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.454537 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.456787 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.456813 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:49.456823 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:49.456829 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.456832 1488539 round_trippers.go:580]     Audit-Id: dd52f4d1-9cae-4805-9d9e-b68fd02d3b2d
	I0916 11:10:49.456836 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.456842 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.456846 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:49.457206 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.954226 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:49.954251 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:49.954261 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.954265 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.956440 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.956470 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:49.956480 1488539 round_trippers.go:580]     Audit-Id: 13f543cf-3ea2-45d4-9ff9-2f423bd577b4
	I0916 11:10:49.956483 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.956486 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.956489 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:49.956492 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:49.956496 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.956632 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:49.957080 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:50.453858 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:50.453885 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:50.453895 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.453901 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.456102 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:50.456121 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:50.456129 1488539 round_trippers.go:580]     Audit-Id: 1613bed7-45ae-4b2e-80b6-49623fb446c4
	I0916 11:10:50.456133 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.456138 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.456141 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:50.456144 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:50.456147 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.456303 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:50.954020 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:50.954046 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:50.954057 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.954061 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.956421 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:50.956446 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:50.956454 1488539 round_trippers.go:580]     Audit-Id: c888938e-098c-4019-8536-a23040fac883
	I0916 11:10:50.956459 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.956462 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.956465 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:50.956468 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:50.956471 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.956639 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:51.454579 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:51.454605 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:51.454615 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.454622 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.456912 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.456937 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:51.456946 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:51.456950 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:51.456953 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.456956 1488539 round_trippers.go:580]     Audit-Id: d729565a-fd30-43b1-98d5-7a3ce301235c
	I0916 11:10:51.456959 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.456962 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.457347 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:51.953844 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:51.953872 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:51.953882 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.953889 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.956111 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.956136 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:51.956144 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.956149 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:51.956153 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:51.956155 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.956159 1488539 round_trippers.go:580]     Audit-Id: f6cbd2ee-a536-4ec6-8aa0-56e2c8ff9691
	I0916 11:10:51.956161 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.956351 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:52.454676 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:52.454703 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:52.454712 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.454717 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.457010 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.457030 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:52.457042 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.457049 1488539 round_trippers.go:580]     Audit-Id: 4fac3f61-0e0e-4406-9ad7-d6ccf9194254
	I0916 11:10:52.457053 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.457055 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.457058 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:52.457060 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:52.457188 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:52.457589 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:52.953876 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:52.953905 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:52.953915 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.953919 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.956394 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.956537 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:52.956552 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.956572 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.956578 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:52.956604 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:52.956610 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.956613 1488539 round_trippers.go:580]     Audit-Id: c26661a1-c6b9-4818-81a0-4c5d52474636
	I0916 11:10:52.956763 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:53.453998 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:53.454025 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:53.454035 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.454040 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.456574 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.456701 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:53.456716 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.456721 1488539 round_trippers.go:580]     Audit-Id: a810cf31-6f88-4d2f-81e5-8ab5ce633b65
	I0916 11:10:53.456724 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.456727 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.456730 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:53.456733 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:53.456863 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:53.954319 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:53.954347 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:53.954357 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.954362 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.956743 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.956770 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:53.956778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:53.956785 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:53.956789 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.956792 1488539 round_trippers.go:580]     Audit-Id: d7bd5034-2c62-4432-9797-7f8285d043d4
	I0916 11:10:53.956795 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.956798 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.956905 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.453890 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:54.453912 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:54.453922 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.453927 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.456334 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.456363 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:54.456372 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.456377 1488539 round_trippers.go:580]     Audit-Id: 80a28004-a8ab-4d15-8917-a62bb08e69f1
	I0916 11:10:54.456382 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.456385 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.456389 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:54.456392 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:54.456515 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.954856 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:54.954881 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:54.954890 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.954894 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.957257 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.957280 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:54.957289 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:54.957293 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:54.957296 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.957299 1488539 round_trippers.go:580]     Audit-Id: 4fab01a1-adb6-4e8c-a7b8-f550304ef242
	I0916 11:10:54.957302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.957306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.957420 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:54.957815 1488539 node_ready.go:53] node "multinode-654612" has status "Ready":"False"
	I0916 11:10:55.454642 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:55.454669 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:55.454679 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.454684 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.456953 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.456980 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:55.456989 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:55.456992 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:55.456996 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.456999 1488539 round_trippers.go:580]     Audit-Id: 7c43051f-da85-4474-8ee2-6a8170836e9e
	I0916 11:10:55.457002 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.457010 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.457473 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:55.954752 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:55.954782 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:55.954796 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.954806 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.957201 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.957228 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:55.957237 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.957242 1488539 round_trippers.go:580]     Audit-Id: fe1ee733-8810-4883-92d8-2f87e4d9d5d2
	I0916 11:10:55.957245 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.957248 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.957251 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:55.957253 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:55.957538 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:56.454679 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.454712 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.454723 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.454728 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.457209 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.457242 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.457258 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.457262 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.457265 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.457269 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.457272 1488539 round_trippers.go:580]     Audit-Id: 7bfffaf3-1ed4-4448-a1cc-9a220e861d45
	I0916 11:10:56.457275 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.457626 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"295","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6223 chars]
	I0916 11:10:56.954474 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.954497 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.954507 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.954512 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.956731 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.956758 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.956766 1488539 round_trippers.go:580]     Audit-Id: 8cfa60ec-9e6f-4c32-a60c-b21ae4a76951
	I0916 11:10:56.956770 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.956773 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.956776 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.956779 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.956783 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.956940 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:56.957345 1488539 node_ready.go:49] node "multinode-654612" has status "Ready":"True"
	I0916 11:10:56.957366 1488539 node_ready.go:38] duration metric: took 41.003724975s for node "multinode-654612" to be "Ready" ...
	I0916 11:10:56.957376 1488539 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
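
The timestamps above show this loop re-fetching the Node object roughly every 500ms until its "Ready" condition turned "True" (41.0s in this run); pod_ready then applies the same poll-with-timeout pattern to the system-critical pods. A minimal sketch of such a readiness poll with client-go, assuming a kubeconfig at the default path; the helper name, interval, and timeout here are illustrative rather than minikube's actual values:

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/apimachinery/pkg/util/wait"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // waitNodeReady re-fetches the Node until its Ready condition is True,
    // mirroring the ~500ms GET /api/v1/nodes/<name> loop visible above.
    func waitNodeReady(ctx context.Context, cs kubernetes.Interface, name string, timeout time.Duration) error {
    	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
    		func(ctx context.Context) (bool, error) {
    			node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
    			if err != nil {
    				return false, nil // treat errors as transient and keep polling
    			}
    			for _, c := range node.Status.Conditions {
    				if c.Type == corev1.NodeReady {
    					return c.Status == corev1.ConditionTrue, nil
    				}
    			}
    			return false, nil
    		})
    }

    func main() {
    	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(config)
    	start := time.Now()
    	if err := waitNodeReady(context.Background(), cs, "multinode-654612", 6*time.Minute); err != nil {
    		panic(err)
    	}
    	fmt.Printf("node ready after %s\n", time.Since(start)) // e.g. ~41s in this run
    }
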
	I0916 11:10:56.957463 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:10:56.957476 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.957485 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.957493 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.960544 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:56.960572 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.960581 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.960585 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.960588 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.960591 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.960595 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.960597 1488539 round_trippers.go:580]     Audit-Id: f4def611-ea75-4386-867f-eb8fd8d4ad18
	I0916 11:10:56.961051 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"407"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59368 chars]
	I0916 11:10:56.965450 1488539 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
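
The per-pod wait that starts here applies the same condition test on every poll: a pod counts as "Ready" when its PodReady condition is True. A hedged sketch of that check (the podReady helper is illustrative, not a minikube function):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    // podReady reports whether a Pod's Ready condition is True — the test
    // the pod_ready loop above applies to coredns and the other
    // system-critical pods each time it re-fetches them.
    func podReady(pod *corev1.Pod) bool {
    	for _, c := range pod.Status.Conditions {
    		if c.Type == corev1.PodReady {
    			return c.Status == corev1.ConditionTrue
    		}
    	}
    	return false
    }

    func main() {
    	pod := &corev1.Pod{Status: corev1.PodStatus{
    		Conditions: []corev1.PodCondition{{Type: corev1.PodReady, Status: corev1.ConditionTrue}},
    	}}
    	fmt.Println(podReady(pod)) // true
    }
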
	I0916 11:10:56.965561 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:56.965575 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.965584 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.965587 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.967860 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.967884 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.967892 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.967897 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.967906 1488539 round_trippers.go:580]     Audit-Id: fa6a0dcc-8b9d-4fa6-899c-26687234b2f6
	I0916 11:10:56.967909 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.967912 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.967915 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.968203 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:56.968801 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:56.968819 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:56.968828 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.968833 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.970883 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.970904 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:56.970912 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:56.970918 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.970922 1488539 round_trippers.go:580]     Audit-Id: e2d6e67e-8281-4a02-bf21-cb8a6e2b3954
	I0916 11:10:56.970926 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.970930 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.970933 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:56.971098 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:57.465754 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:57.465794 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.465807 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.465818 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.468444 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.468513 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.468552 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.468580 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.468599 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.468629 1488539 round_trippers.go:580]     Audit-Id: e12e5efd-461f-4f8a-a1b9-8cd6c3998411
	I0916 11:10:57.468650 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.468667 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.468876 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:57.469500 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:57.469523 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.469531 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.469538 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.471748 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.471766 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.471774 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.471778 1488539 round_trippers.go:580]     Audit-Id: 8dc315b6-dff0-4b0d-8c90-272dd2af6883
	I0916 11:10:57.471783 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.471786 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.471789 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.471792 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.471916 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:57.965716 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:57.965740 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.965749 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.965754 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.968099 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.968121 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.968129 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.968136 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.968139 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.968143 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.968146 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.968149 1488539 round_trippers.go:580]     Audit-Id: 1f84b90e-213a-407b-ad42-9dce9531bc91
	I0916 11:10:57.968337 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:57.968937 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:57.968949 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:57.968957 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.968963 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.971088 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.971108 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:57.971116 1488539 round_trippers.go:580]     Audit-Id: 8b432d10-c050-4e57-a406-36d293743e90
	I0916 11:10:57.971119 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.971122 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.971124 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:57.971127 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:57.971130 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.971245 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.466374 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:58.466400 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.466410 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.466415 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.468785 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.468809 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.468818 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.468824 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.468827 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.468829 1488539 round_trippers.go:580]     Audit-Id: 8750444e-d4db-4751-8176-465a492dd88b
	I0916 11:10:58.468832 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.468835 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.468968 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:58.469526 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:58.469542 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.469550 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.469555 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.471429 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:58.471456 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.471476 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.471481 1488539 round_trippers.go:580]     Audit-Id: 2abf9428-5bb0-462b-9537-0c955075176c
	I0916 11:10:58.471485 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.471489 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.471492 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.471495 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.471638 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.966357 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:58.966385 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.966397 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.966401 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.969620 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:58.969716 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.969738 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.969756 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.969848 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.969874 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.969878 1488539 round_trippers.go:580]     Audit-Id: 653cd0fd-26dc-42f8-8364-b04d53e0eee5
	I0916 11:10:58.969880 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.970015 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"407","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6701 chars]
	I0916 11:10:58.970598 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:58.970618 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:58.970627 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.970632 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.972910 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.973001 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:58.973027 1488539 round_trippers.go:580]     Audit-Id: 68b2e1f7-43ad-4d2a-a07b-7787472b9701
	I0916 11:10:58.973058 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.973081 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.973100 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:58.973117 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:58.973144 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.973325 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:58.973761 1488539 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:59.465645 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:10:59.465669 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.465679 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.465683 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.468326 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.468469 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.468511 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.468529 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.468548 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.468556 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.468559 1488539 round_trippers.go:580]     Audit-Id: ea09de90-9c25-4ca3-8a98-2e908ac79e36
	I0916 11:10:59.468562 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.468698 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:10:59.469284 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.469305 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.469314 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.469319 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.471523 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.471542 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.471550 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.471556 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.471559 1488539 round_trippers.go:580]     Audit-Id: 1f01af88-f328-4fa2-8bfb-bdf3273b2bf9
	I0916 11:10:59.471562 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.471565 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.471567 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.471729 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.472137 1488539 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.472161 1488539 pod_ready.go:82] duration metric: took 2.506681902s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.472173 1488539 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.472244 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:10:59.472254 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.472262 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.472267 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.474540 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.474560 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.474570 1488539 round_trippers.go:580]     Audit-Id: 393fab81-d354-4cf5-95dc-a7a82cf6ad0d
	I0916 11:10:59.474575 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.474579 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.474582 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.474587 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.474594 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.474856 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"388","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6435 chars]
	I0916 11:10:59.475391 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.475409 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.475419 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.475425 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.477595 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.477617 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.477625 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.477629 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.477633 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.477636 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.477638 1488539 round_trippers.go:580]     Audit-Id: ceac1902-fb40-4be7-985c-6cd16ee0e8f8
	I0916 11:10:59.477641 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.478015 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.478411 1488539 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.478431 1488539 pod_ready.go:82] duration metric: took 6.246396ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.478446 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.478520 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:10:59.478531 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.478539 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.478544 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.480713 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.480734 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.480742 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.480746 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.480749 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.480753 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.480756 1488539 round_trippers.go:580]     Audit-Id: 71e2ae1e-0063-4af3-8b08-9be39ac6fe3b
	I0916 11:10:59.480759 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.480953 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"386","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8513 chars]
	I0916 11:10:59.481497 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.481512 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.481520 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.481525 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.483572 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.483588 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.483596 1488539 round_trippers.go:580]     Audit-Id: a2b20a94-21c0-49fb-9d48-a1312fafcee4
	I0916 11:10:59.483601 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.483604 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.483607 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.483611 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.483614 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.483888 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.484310 1488539 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.484330 1488539 pod_ready.go:82] duration metric: took 5.871717ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.484342 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.484411 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:10:59.484420 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.484428 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.484434 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.486536 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.486562 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.486570 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.486575 1488539 round_trippers.go:580]     Audit-Id: 6f2a8475-ad58-4a32-b1bc-c866054c08fd
	I0916 11:10:59.486578 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.486582 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.486586 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.486589 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.486923 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"372","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8088 chars]
	I0916 11:10:59.487489 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.487506 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.487515 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.487519 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.489478 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:59.489499 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.489506 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.489512 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.489516 1488539 round_trippers.go:580]     Audit-Id: fa56402c-74cc-4e98-8dbb-b40646ce4dd6
	I0916 11:10:59.489519 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.489524 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.489528 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.489763 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.490144 1488539 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.490163 1488539 pod_ready.go:82] duration metric: took 5.812116ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.490178 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.490239 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:10:59.490248 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.490256 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.490259 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.492302 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.492357 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.492380 1488539 round_trippers.go:580]     Audit-Id: 64686014-8698-440c-80c8-024c917ea91e
	I0916 11:10:59.492395 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.492400 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.492405 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.492408 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.492412 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.492651 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"381","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:10:59.493215 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.493235 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.493244 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.493250 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.498681 1488539 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:10:59.498707 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.498721 1488539 round_trippers.go:580]     Audit-Id: 7c8e2471-3933-4b20-ae44-f62ca8509a31
	I0916 11:10:59.498726 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.498730 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.498733 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.498741 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.498747 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.498843 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.499294 1488539 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.499311 1488539 pod_ready.go:82] duration metric: took 9.125324ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.499323 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.665710 1488539 request.go:632] Waited for 166.301049ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:10:59.665806 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:10:59.665813 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.665821 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.665826 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.668420 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.668495 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.668515 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.668522 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.668530 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.668536 1488539 round_trippers.go:580]     Audit-Id: a8306d45-5dd2-43f6-bcaf-f0d96146eaa9
	I0916 11:10:59.668539 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.668542 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.668707 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"380","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4970 chars]
	I0916 11:10:59.866099 1488539 request.go:632] Waited for 196.94763ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.866222 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:10:59.866234 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.866244 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.866254 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.868657 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.868742 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.868759 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.868764 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.868789 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.868794 1488539 round_trippers.go:580]     Audit-Id: df89a71d-3b08-476c-82fc-39a1997f4052
	I0916 11:10:59.868797 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.868814 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.868934 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:10:59.869370 1488539 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:59.869392 1488539 pod_ready.go:82] duration metric: took 370.055117ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:59.869405 1488539 pod_ready.go:39] duration metric: took 2.912012681s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
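The readiness loop traced above (pod_ready.go) is a plain poll: GET the pod, inspect its Ready condition, retry on roughly half-second intervals until the 6m0s budget runs out. A minimal client-go sketch of that pattern, assuming a placeholder kubeconfig path and pod name taken from the log; this is not minikube's actual implementation:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	deadline := time.Now().Add(6 * time.Minute) // mirrors the 6m0s budget in the log
	for time.Now().Before(deadline) {
		pod, err := cs.CoreV1().Pods("kube-system").Get(context.TODO(),
			"coredns-7c65d6cfc9-szvv9", metav1.GetOptions{})
		if err == nil {
			for _, c := range pod.Status.Conditions {
				// Ready flips to True once the pod passes its readiness probe,
				// which is the "Ready":"False" -> "Ready":"True" transition above.
				if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
					fmt.Println("pod is Ready")
					return
				}
			}
		}
		time.Sleep(500 * time.Millisecond) // the log shows ~500ms between polls
	}
	fmt.Println("timed out waiting for Ready")
}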
	I0916 11:10:59.869424 1488539 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:10:59.869494 1488539 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:10:59.880592 1488539 command_runner.go:130] > 1383
	I0916 11:10:59.882010 1488539 api_server.go:72] duration metric: took 44.948124134s to wait for apiserver process to appear ...
	I0916 11:10:59.882033 1488539 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:10:59.882054 1488539 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0916 11:10:59.891560 1488539 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
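The healthz probe is an authenticated GET against the apiserver's /healthz path, which answers 200 with the literal body "ok" when healthy, exactly as logged. A sketch of the same check through client-go's discovery REST client; the kubeconfig path is again a placeholder:

package main

import (
	"context"
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// DoRaw returns the raw response body; a healthy apiserver answers "ok".
	body, err := cs.Discovery().RESTClient().Get().AbsPath("/healthz").DoRaw(context.TODO())
	if err != nil {
		panic(err)
	}
	fmt.Printf("healthz: %s\n", body)
}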
	I0916 11:10:59.891656 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/version
	I0916 11:10:59.891664 1488539 round_trippers.go:469] Request Headers:
	I0916 11:10:59.891673 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.891678 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.892639 1488539 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:10:59.892659 1488539 round_trippers.go:577] Response Headers:
	I0916 11:10:59.892668 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.892756 1488539 round_trippers.go:580]     Audit-Id: 07ce97be-5d9b-43b2-9c1c-426c9b9cf22e
	I0916 11:10:59.892767 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.892775 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.892778 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:10:59.892781 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:10:59.892784 1488539 round_trippers.go:580]     Content-Length: 263
	I0916 11:10:59.892800 1488539 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:10:59.892886 1488539 api_server.go:141] control plane version: v1.31.1
	I0916 11:10:59.892908 1488539 api_server.go:131] duration metric: took 10.868387ms to wait for apiserver health ...
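The /version body shown above decodes into client-go's version.Info struct (Major, Minor, GitVersion, Platform, and so on). A sketch of the same probe via the discovery client, with the kubeconfig path as a placeholder:

package main

import (
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	info, err := cs.Discovery().ServerVersion()
	if err != nil {
		panic(err)
	}
	// For the cluster above this prints "v1.31.1 (linux/arm64)".
	fmt.Printf("%s (%s)\n", info.GitVersion, info.Platform)
}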
	I0916 11:10:59.892916 1488539 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:11:00.066576 1488539 request.go:632] Waited for 173.570698ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.066702 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.066729 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.066754 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.066778 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.089467 1488539 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 11:11:00.089564 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.089591 1488539 round_trippers.go:580]     Audit-Id: a5a10f7a-2d53-49c8-b97a-380f63cd08c9
	I0916 11:11:00.089612 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.089654 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.089673 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.089693 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.089715 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.100667 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59444 chars]
	I0916 11:11:00.120364 1488539 system_pods.go:59] 8 kube-system pods found
	I0916 11:11:00.120411 1488539 system_pods.go:61] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:11:00.120420 1488539 system_pods.go:61] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:11:00.120425 1488539 system_pods.go:61] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:11:00.120431 1488539 system_pods.go:61] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:11:00.120436 1488539 system_pods.go:61] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:11:00.120440 1488539 system_pods.go:61] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:11:00.120444 1488539 system_pods.go:61] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:11:00.120449 1488539 system_pods.go:61] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:11:00.120456 1488539 system_pods.go:74] duration metric: took 227.532665ms to wait for pod list to return data ...
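The recurring "Waited ... due to client-side throttling, not priority and fairness" lines come from client-go's own token-bucket rate limiter, which by default allows 5 requests/second with a burst of 10; server-side APF is not involved, as the message itself notes. A sketch of raising those limits on rest.Config (the values are illustrative, not minikube's):

package main

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	cfg.QPS = 50    // default is 5 requests/second once the burst is spent
	cfg.Burst = 100 // default burst is 10
	if _, err := kubernetes.NewForConfig(cfg); err != nil {
		panic(err)
	}
}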
	I0916 11:11:00.120465 1488539 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:11:00.265711 1488539 request.go:632] Waited for 145.145617ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:11:00.265812 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:11:00.265820 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.265829 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.265840 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.270264 1488539 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:00.270297 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.270314 1488539 round_trippers.go:580]     Audit-Id: 31d397e2-ac96-42ab-9672-10bf68d9d264
	I0916 11:11:00.270319 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.270322 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.270326 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.270329 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.270333 1488539 round_trippers.go:580]     Content-Length: 261
	I0916 11:11:00.270336 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.270555 1488539 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"8b0a4fd5-1ca6-4da1-beae-b1e2017b49fd","resourceVersion":"297","creationTimestamp":"2024-09-16T11:10:14Z"}}]}
	I0916 11:11:00.270811 1488539 default_sa.go:45] found service account: "default"
	I0916 11:11:00.270839 1488539 default_sa.go:55] duration metric: took 150.366331ms for default service account to be created ...
	I0916 11:11:00.270861 1488539 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:11:00.466538 1488539 request.go:632] Waited for 195.530187ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.466615 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:00.466623 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.466633 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.466639 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.470039 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:00.470073 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.470083 1488539 round_trippers.go:580]     Audit-Id: 8d22c61f-f01a-448b-a62c-8b72e08ed17d
	I0916 11:11:00.470087 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.470091 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.470095 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.470098 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.470101 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.470957 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 59444 chars]
	I0916 11:11:00.473681 1488539 system_pods.go:86] 8 kube-system pods found
	I0916 11:11:00.473722 1488539 system_pods.go:89] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:11:00.473730 1488539 system_pods.go:89] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:11:00.473735 1488539 system_pods.go:89] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:11:00.473739 1488539 system_pods.go:89] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:11:00.473744 1488539 system_pods.go:89] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:11:00.473750 1488539 system_pods.go:89] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:11:00.473754 1488539 system_pods.go:89] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:11:00.473758 1488539 system_pods.go:89] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:11:00.473765 1488539 system_pods.go:126] duration metric: took 202.894512ms to wait for k8s-apps to be running ...
	I0916 11:11:00.473777 1488539 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:11:00.473840 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:00.487269 1488539 system_svc.go:56] duration metric: took 13.481666ms WaitForService to wait for kubelet
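Both host-side probes above reduce to exit-code checks: pgrep for a live kube-apiserver process and systemctl for the kubelet unit, each executed over SSH by ssh_runner. A local os/exec stand-in, mirroring the log's exact arguments (minikube actually runs these on the guest, not locally):

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Exit code 0 means at least one matching process / an active unit.
	apiserver := exec.Command("sudo", "pgrep", "-xnf", "kube-apiserver.*minikube.*").Run() == nil
	kubelet := exec.Command("sudo", "systemctl", "is-active", "--quiet", "service", "kubelet").Run() == nil
	fmt.Printf("apiserver running: %v, kubelet active: %v\n", apiserver, kubelet)
}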
	I0916 11:11:00.487299 1488539 kubeadm.go:582] duration metric: took 45.553416606s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:11:00.487318 1488539 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:00.665713 1488539 request.go:632] Waited for 178.262718ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:00.665806 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:00.665818 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:00.665848 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.665864 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.668567 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:00.668645 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:00.668670 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:00.668723 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:00.668743 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.668762 1488539 round_trippers.go:580]     Audit-Id: 57b02d9f-1bb5-4431-933d-dbb6ed1664ae
	I0916 11:11:00.668778 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.668782 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.668969 1488539 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"428"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields
":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 6082 chars]
	I0916 11:11:00.669474 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:00.669508 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:00.669522 1488539 node_conditions.go:105] duration metric: took 182.198499ms to run NodePressure ...
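The NodePressure step reads each node's reported capacity (2 CPUs and 203034800Ki of ephemeral storage here); the same Node objects also carry the pressure conditions a stricter check could inspect. A client-go sketch of that read, with a placeholder kubeconfig:

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, n := range nodes.Items {
		// For multinode-654612 this reports cpu=2, ephemeral-storage=203034800Ki.
		fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name,
			n.Status.Capacity.Cpu(), n.Status.Capacity.StorageEphemeral())
		for _, c := range n.Status.Conditions {
			if (c.Type == corev1.NodeMemoryPressure || c.Type == corev1.NodeDiskPressure) &&
				c.Status == corev1.ConditionTrue {
				fmt.Printf("  pressure condition %s is True\n", c.Type)
			}
		}
	}
}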
	I0916 11:11:00.669536 1488539 start.go:241] waiting for startup goroutines ...
	I0916 11:11:00.669543 1488539 start.go:246] waiting for cluster config update ...
	I0916 11:11:00.669555 1488539 start.go:255] writing updated cluster config ...
	I0916 11:11:00.672794 1488539 out.go:201] 
	I0916 11:11:00.675785 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:00.675885 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:11:00.678947 1488539 out.go:177] * Starting "multinode-654612-m02" worker node in "multinode-654612" cluster
	I0916 11:11:00.682259 1488539 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:11:00.685034 1488539 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:11:00.687880 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:11:00.687920 1488539 cache.go:56] Caching tarball of preloaded images
	I0916 11:11:00.687977 1488539 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:11:00.688063 1488539 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:11:00.688077 1488539 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:11:00.688179 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:11:00.706652 1488539 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:11:00.706683 1488539 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:11:00.706771 1488539 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:11:00.706796 1488539 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:11:00.706802 1488539 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:11:00.706811 1488539 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:11:00.706821 1488539 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:11:00.708139 1488539 image.go:273] response: 
	I0916 11:11:00.853461 1488539 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:11:00.853503 1488539 cache.go:194] Successfully downloaded all kic artifacts
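The W-level line above shows the cache fallback: the kicbase image is present in the local docker daemon but flagged as the wrong architecture, so minikube loads the cached tarball instead of using the daemon copy. A hypothetical sketch of such an architecture check via the docker CLI (tag shortened and digest omitted; this is an illustration of the idea, not image.go's actual logic):

package main

import (
	"fmt"
	"os/exec"
	"runtime"
	"strings"
)

func main() {
	img := "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644" // digest omitted
	out, err := exec.Command("docker", "image", "inspect",
		"--format", "{{.Architecture}}", img).Output()
	if err != nil {
		fmt.Println("image not in local daemon; load from cache")
		return
	}
	if arch := strings.TrimSpace(string(out)); arch != runtime.GOARCH {
		// Mirrors the warning above: present locally, but unusable as-is.
		fmt.Printf("image is %s, want %s; load from cache\n", arch, runtime.GOARCH)
	}
}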
	I0916 11:11:00.853538 1488539 start.go:360] acquireMachinesLock for multinode-654612-m02: {Name:mk70904bbc860a548c4a9726b7d64e227f1f9cac Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:11:00.853676 1488539 start.go:364] duration metric: took 116.969µs to acquireMachinesLock for "multinode-654612-m02"
	I0916 11:11:00.853712 1488539 start.go:93] Provisioning new machine with config: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:00.853791 1488539 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 11:11:00.857131 1488539 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:11:00.857265 1488539 start.go:159] libmachine.API.Create for "multinode-654612" (driver="docker")
	I0916 11:11:00.857297 1488539 client.go:168] LocalClient.Create starting
	I0916 11:11:00.857381 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:11:00.857418 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:11:00.857437 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:11:00.857492 1488539 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:11:00.857514 1488539 main.go:141] libmachine: Decoding PEM data...
	I0916 11:11:00.857527 1488539 main.go:141] libmachine: Parsing certificate...
	I0916 11:11:00.857800 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:11:00.874572 1488539 network_create.go:77] Found existing network {name:multinode-654612 subnet:0x4001935740 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 67 1] mtu:1500}
	I0916 11:11:00.874624 1488539 kic.go:121] calculated static IP "192.168.67.3" for the "multinode-654612-m02" container
	I0916 11:11:00.874702 1488539 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:11:00.894126 1488539 cli_runner.go:164] Run: docker volume create multinode-654612-m02 --label name.minikube.sigs.k8s.io=multinode-654612-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:11:00.912587 1488539 oci.go:103] Successfully created a docker volume multinode-654612-m02
	I0916 11:11:00.912769 1488539 cli_runner.go:164] Run: docker run --rm --name multinode-654612-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612-m02 --entrypoint /usr/bin/test -v multinode-654612-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:11:01.512883 1488539 oci.go:107] Successfully prepared a docker volume multinode-654612-m02
	I0916 11:11:01.512928 1488539 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:11:01.512950 1488539 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:11:01.513034 1488539 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:11:05.694032 1488539 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v multinode-654612-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.180954104s)
	I0916 11:11:05.694068 1488539 kic.go:203] duration metric: took 4.181114322s to extract preloaded images to volume ...
	W0916 11:11:05.694209 1488539 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:11:05.694314 1488539 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:11:05.747729 1488539 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-654612-m02 --name multinode-654612-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-654612-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-654612-m02 --network multinode-654612 --ip 192.168.67.3 --volume multinode-654612-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:11:06.094697 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Running}}
	I0916 11:11:06.124132 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:06.145177 1488539 cli_runner.go:164] Run: docker exec multinode-654612-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:11:06.195786 1488539 oci.go:144] the created container "multinode-654612-m02" has a running status.
	I0916 11:11:06.195813 1488539 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa...
	I0916 11:11:06.924417 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:11:06.924542 1488539 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:11:06.952721 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:06.978671 1488539 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:11:06.978691 1488539 kic_runner.go:114] Args: [docker exec --privileged multinode-654612-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:11:07.046268 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:11:07.067328 1488539 machine.go:93] provisionDockerMachine start ...
	I0916 11:11:07.067438 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.118996 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.119278 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.119288 1488539 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:11:07.278066 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:11:07.278093 1488539 ubuntu.go:169] provisioning hostname "multinode-654612-m02"
	I0916 11:11:07.278165 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.308033 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.308345 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.308366 1488539 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612-m02 && echo "multinode-654612-m02" | sudo tee /etc/hostname
	I0916 11:11:07.463803 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:11:07.463884 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:07.484002 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:07.484250 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:07.484268 1488539 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:11:07.620968 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
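Each of the SSH commands above (hostname, sudo hostname ..., and the /etc/hosts patch) runs over the forwarded address 127.0.0.1:34743 with the generated id_rsa key. A minimal Go sketch of one such round trip using golang.org/x/crypto/ssh, with the key path and port taken from this log; illustrative only, not minikube's ssh_runner:

    package main

    import (
        "fmt"
        "log"
        "os"

        "golang.org/x/crypto/ssh"
    )

    func main() {
        // Key and address as they appear in the log above.
        pemBytes, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa")
        if err != nil {
            log.Fatal(err)
        }
        signer, err := ssh.ParsePrivateKey(pemBytes)
        if err != nil {
            log.Fatal(err)
        }
        cfg := &ssh.ClientConfig{
            User:            "docker",
            Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
            HostKeyCallback: ssh.InsecureIgnoreHostKey(), // acceptable for a local test container, not production
        }
        client, err := ssh.Dial("tcp", "127.0.0.1:34743", cfg)
        if err != nil {
            log.Fatal(err)
        }
        defer client.Close()
        sess, err := client.NewSession()
        if err != nil {
            log.Fatal(err)
        }
        defer sess.Close()
        out, err := sess.Output("hostname") // same first command as the log
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("hostname: %s", out)
    }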
	I0916 11:11:07.620998 1488539 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:11:07.621015 1488539 ubuntu.go:177] setting up certificates
	I0916 11:11:07.621027 1488539 provision.go:84] configureAuth start
	I0916 11:11:07.621089 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:07.638084 1488539 provision.go:143] copyHostCerts
	I0916 11:11:07.638149 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:11:07.638187 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:11:07.638200 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:11:07.638279 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:11:07.638383 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:11:07.638404 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:11:07.638408 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:11:07.638440 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:11:07.638496 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:11:07.638516 1488539 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:11:07.638522 1488539 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:11:07.638552 1488539 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:11:07.638600 1488539 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612-m02 san=[127.0.0.1 192.168.67.3 localhost minikube multinode-654612-m02]
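The server cert generated above is signed by the minikube CA and carries the SANs listed in the log line (127.0.0.1, 192.168.67.3, localhost, minikube, multinode-654612-m02). A self-contained Go sketch of the same idea using crypto/x509, with an in-memory stand-in CA instead of the on-disk ca.pem/ca-key.pem, and error handling elided for brevity:

    package main

    import (
        "crypto/rand"
        "crypto/rsa"
        "crypto/x509"
        "crypto/x509/pkix"
        "encoding/pem"
        "math/big"
        "net"
        "os"
        "time"
    )

    func main() {
        // Stand-in CA (minikube would load its existing ca.pem/ca-key.pem here).
        caKey, _ := rsa.GenerateKey(rand.Reader, 2048)
        caTmpl := &x509.Certificate{
            SerialNumber:          big.NewInt(1),
            Subject:               pkix.Name{CommonName: "minikubeCA"},
            NotBefore:             time.Now(),
            NotAfter:              time.Now().Add(3 * 365 * 24 * time.Hour),
            IsCA:                  true,
            KeyUsage:              x509.KeyUsageCertSign,
            BasicConstraintsValid: true,
        }
        caDER, _ := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
        caCert, _ := x509.ParseCertificate(caDER)

        // Server cert with the org and SANs from the log line above.
        srvKey, _ := rsa.GenerateKey(rand.Reader, 2048)
        srvTmpl := &x509.Certificate{
            SerialNumber: big.NewInt(2),
            Subject:      pkix.Name{Organization: []string{"jenkins.multinode-654612-m02"}},
            NotBefore:    time.Now(),
            NotAfter:     time.Now().Add(3 * 365 * 24 * time.Hour),
            KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
            ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
            IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.67.3")},
            DNSNames:     []string{"localhost", "minikube", "multinode-654612-m02"},
        }
        srvDER, _ := x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey)
        pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: srvDER})
    }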
	I0916 11:11:08.737757 1488539 provision.go:177] copyRemoteCerts
	I0916 11:11:08.737832 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:11:08.737876 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:08.757155 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:08.854353 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:11:08.854422 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:11:08.880881 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:11:08.880949 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:11:08.908329 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:11:08.908449 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:11:08.934795 1488539 provision.go:87] duration metric: took 1.313752535s to configureAuth
	I0916 11:11:08.934825 1488539 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:11:08.935059 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:08.935182 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:08.952259 1488539 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:08.952507 1488539 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34743 <nil> <nil>}
	I0916 11:11:08.952522 1488539 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:11:09.190216 1488539 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:11:09.190244 1488539 machine.go:96] duration metric: took 2.122894048s to provisionDockerMachine
	I0916 11:11:09.190255 1488539 client.go:171] duration metric: took 8.332949187s to LocalClient.Create
	I0916 11:11:09.190268 1488539 start.go:167] duration metric: took 8.333013235s to libmachine.API.Create "multinode-654612"
	I0916 11:11:09.190276 1488539 start.go:293] postStartSetup for "multinode-654612-m02" (driver="docker")
	I0916 11:11:09.190287 1488539 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:11:09.190352 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:11:09.190400 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.207541 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.307405 1488539 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:11:09.311100 1488539 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:11:09.311132 1488539 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:11:09.311140 1488539 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:11:09.311168 1488539 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:11:09.311179 1488539 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:11:09.311183 1488539 command_runner.go:130] > ID=ubuntu
	I0916 11:11:09.311187 1488539 command_runner.go:130] > ID_LIKE=debian
	I0916 11:11:09.311191 1488539 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:11:09.311196 1488539 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:11:09.311208 1488539 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:11:09.311216 1488539 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:11:09.311238 1488539 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:11:09.311612 1488539 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:11:09.311671 1488539 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:11:09.311699 1488539 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:11:09.311723 1488539 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:11:09.311755 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:11:09.311831 1488539 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:11:09.311935 1488539 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:11:09.311964 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:11:09.312090 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:11:09.322001 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:11:09.349115 1488539 start.go:296] duration metric: took 158.797246ms for postStartSetup
	I0916 11:11:09.349587 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:09.367303 1488539 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:11:09.367820 1488539 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:11:09.367875 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.385781 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.481853 1488539 command_runner.go:130] > 12%
	I0916 11:11:09.481954 1488539 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:11:09.486528 1488539 command_runner.go:130] > 171G
	I0916 11:11:09.486931 1488539 start.go:128] duration metric: took 8.633125281s to createHost
	I0916 11:11:09.486964 1488539 start.go:83] releasing machines lock for "multinode-654612-m02", held for 8.633275849s
	I0916 11:11:09.487037 1488539 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:11:09.510532 1488539 out.go:177] * Found network options:
	I0916 11:11:09.513308 1488539 out.go:177]   - NO_PROXY=192.168.67.2
	W0916 11:11:09.515867 1488539 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:11:09.515907 1488539 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:11:09.515979 1488539 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:11:09.516030 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.516075 1488539 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:11:09.516130 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:11:09.540288 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.554543 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:11:09.788185 1488539 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:11:09.788270 1488539 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:11:09.792278 1488539 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:11:09.792301 1488539 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:11:09.792308 1488539 command_runner.go:130] > Device: b3h/179d	Inode: 1570512     Links: 1
	I0916 11:11:09.792314 1488539 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:09.792320 1488539 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:11:09.792325 1488539 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:11:09.792330 1488539 command_runner.go:130] > Change: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:11:09.792336 1488539 command_runner.go:130] >  Birth: 2024-09-16 10:35:03.060526663 +0000
	I0916 11:11:09.792651 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:11:09.815261 1488539 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:11:09.815338 1488539 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:11:09.851663 1488539 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:11:09.851707 1488539 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:11:09.851716 1488539 start.go:495] detecting cgroup driver to use...
	I0916 11:11:09.851747 1488539 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:11:09.851811 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:11:09.870178 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:11:09.882882 1488539 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:11:09.882995 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:11:09.898160 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:11:09.914062 1488539 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:11:10.017548 1488539 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:11:10.130274 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:11:10.130373 1488539 docker.go:233] disabling docker service ...
	I0916 11:11:10.130516 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:11:10.154434 1488539 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:11:10.169082 1488539 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:11:10.273837 1488539 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:11:10.273915 1488539 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:11:10.372559 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:11:10.372733 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:11:10.385735 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:11:10.402452 1488539 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:11:10.403842 1488539 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:11:10.403945 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.414785 1488539 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:11:10.414903 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.426239 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.437599 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.448596 1488539 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:11:10.460133 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.471372 1488539 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:11:10.489257 1488539 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
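Taken together, the sed edits above should leave /etc/crio/crio.conf.d/02-crio.conf with roughly these settings; this fragment is reconstructed from the commands (section headers assumed from CRI-O's usual layout), since the file itself never appears in the log:

    [crio.image]
    pause_image = "registry.k8s.io/pause:3.10"

    [crio.runtime]
    cgroup_manager = "cgroupfs"
    conmon_cgroup = "pod"
    default_sysctls = [
      "net.ipv4.ip_unprivileged_port_start=0",
    ]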
	I0916 11:11:10.500785 1488539 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:11:10.509177 1488539 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:11:10.510610 1488539 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:11:10.519719 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:10.611398 1488539 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:11:10.730082 1488539 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:11:10.730176 1488539 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:11:10.734378 1488539 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:11:10.734409 1488539 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:11:10.734421 1488539 command_runner.go:130] > Device: bch/188d	Inode: 186         Links: 1
	I0916 11:11:10.734428 1488539 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:10.734433 1488539 command_runner.go:130] > Access: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734441 1488539 command_runner.go:130] > Modify: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734450 1488539 command_runner.go:130] > Change: 2024-09-16 11:11:10.713238796 +0000
	I0916 11:11:10.734453 1488539 command_runner.go:130] >  Birth: -
	I0916 11:11:10.734684 1488539 start.go:563] Will wait 60s for crictl version
	I0916 11:11:10.734745 1488539 ssh_runner.go:195] Run: which crictl
	I0916 11:11:10.738210 1488539 command_runner.go:130] > /usr/bin/crictl
	I0916 11:11:10.738596 1488539 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:11:10.780606 1488539 command_runner.go:130] > Version:  0.1.0
	I0916 11:11:10.781046 1488539 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:11:10.781215 1488539 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:11:10.781373 1488539 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:11:10.784585 1488539 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:11:10.784719 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:11:10.823377 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:11:10.823400 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:11:10.823409 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:11:10.823414 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:11:10.823423 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:11:10.823428 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:11:10.823432 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:11:10.823438 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:11:10.823442 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:11:10.823451 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:11:10.823458 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:11:10.823462 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:11:10.826040 1488539 ssh_runner.go:195] Run: crio --version
	I0916 11:11:10.878676 1488539 command_runner.go:130] > crio version 1.24.6
	I0916 11:11:10.878701 1488539 command_runner.go:130] > Version:          1.24.6
	I0916 11:11:10.878710 1488539 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:11:10.878715 1488539 command_runner.go:130] > GitTreeState:     clean
	I0916 11:11:10.878722 1488539 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:11:10.878726 1488539 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:11:10.878730 1488539 command_runner.go:130] > Compiler:         gc
	I0916 11:11:10.878734 1488539 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:11:10.878740 1488539 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:11:10.878748 1488539 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:11:10.878755 1488539 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:11:10.878765 1488539 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:11:10.883253 1488539 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:11:10.885850 1488539 out.go:177]   - env NO_PROXY=192.168.67.2
	I0916 11:11:10.888527 1488539 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:11:10.904887 1488539 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:11:10.908885 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:10.919789 1488539 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:11:10.920029 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:10.920300 1488539 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:11:10.937108 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:11:10.937482 1488539 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.3
	I0916 11:11:10.937500 1488539 certs.go:194] generating shared ca certs ...
	I0916 11:11:10.937522 1488539 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:11:10.937690 1488539 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:11:10.937751 1488539 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:11:10.937767 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:11:10.937787 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:11:10.937805 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:11:10.937823 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:11:10.937911 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:11:10.937958 1488539 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:11:10.938029 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:11:10.938128 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:11:10.938206 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:11:10.938246 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:11:10.938320 1488539 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:11:10.938370 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:11:10.938397 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:10.938409 1488539 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:11:10.938472 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:11:10.969739 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:11:10.998757 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:11:11.026459 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:11:11.051456 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:11:11.078334 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:11:11.104702 1488539 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:11:11.132426 1488539 ssh_runner.go:195] Run: openssl version
	I0916 11:11:11.137827 1488539 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:11:11.138236 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:11:11.148491 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152754 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152793 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.152850 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:11:11.159733 1488539 command_runner.go:130] > 3ec20f2e
	I0916 11:11:11.160194 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:11:11.170159 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:11:11.179701 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184008 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184176 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.184244 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.191019 1488539 command_runner.go:130] > b5213941
	I0916 11:11:11.191441 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:11:11.201136 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:11:11.210539 1488539 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.213875 1488539 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.214175 1488539 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.214241 1488539 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:11:11.221459 1488539 command_runner.go:130] > 51391683
	I0916 11:11:11.221604 1488539 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:11:11.231519 1488539 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:11:11.234972 1488539 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:11.235011 1488539 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:11.235041 1488539 kubeadm.go:934] updating node {m02 192.168.67.3 8443 v1.31.1 crio false true} ...
	I0916 11:11:11.235129 1488539 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:11:11.235201 1488539 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:11:11.242965 1488539 command_runner.go:130] > kubeadm
	I0916 11:11:11.242990 1488539 command_runner.go:130] > kubectl
	I0916 11:11:11.242995 1488539 command_runner.go:130] > kubelet
	I0916 11:11:11.243993 1488539 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:11:11.244085 1488539 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:11:11.253211 1488539 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (370 bytes)
	I0916 11:11:11.272594 1488539 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:11:11.292079 1488539 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:11:11.295744 1488539 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:11.307441 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:11.406960 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:11.422192 1488539 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:11:11.422471 1488539 start.go:317] joinCluster: &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:11:11.422568 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 11:11:11.422625 1488539 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:11:11.447222 1488539 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:11:11.613253 1488539 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:11:11.613295 1488539 start.go:343] trying to join worker node "m02" to cluster: &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:11.613333 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=multinode-654612-m02"
	I0916 11:11:11.656532 1488539 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:11:11.666665 1488539 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:11:11.666690 1488539 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:11:11.666696 1488539 command_runner.go:130] > OS: Linux
	I0916 11:11:11.666708 1488539 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:11:11.666714 1488539 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:11:11.666719 1488539 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:11:11.666724 1488539 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:11:11.666729 1488539 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:11:11.666734 1488539 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:11:11.666741 1488539 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:11:11.666746 1488539 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:11:11.666751 1488539 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:11:11.757112 1488539 command_runner.go:130] > [preflight] Reading configuration from the cluster...
	I0916 11:11:11.757182 1488539 command_runner.go:130] > [preflight] FYI: You can look at this config file with 'kubectl -n kube-system get cm kubeadm-config -o yaml'
	I0916 11:11:11.795717 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:11:11.795973 1488539 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:11:11.796169 1488539 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:11:11.901509 1488539 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:11:13.407549 1488539 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.504072304s
	I0916 11:11:13.407576 1488539 command_runner.go:130] > [kubelet-start] Waiting for the kubelet to perform the TLS Bootstrap
	I0916 11:11:13.921373 1488539 command_runner.go:130] > This node has joined the cluster:
	I0916 11:11:13.921412 1488539 command_runner.go:130] > * Certificate signing request was sent to apiserver and a response was received.
	I0916 11:11:13.921419 1488539 command_runner.go:130] > * The Kubelet was informed of the new secure connection details.
	I0916 11:11:13.921426 1488539 command_runner.go:130] > Run 'kubectl get nodes' on the control-plane to see this node join the cluster.
	I0916 11:11:13.925918 1488539 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:11:13.925950 1488539 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:11:13.925972 1488539 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token bvq0jt.ttp95g4knqxro6b5 --discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 --ignore-preflight-errors=all --cri-socket unix:///var/run/crio/crio.sock --node-name=multinode-654612-m02": (2.312624427s)
	I0916 11:11:13.925997 1488539 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 11:11:14.029869 1488539 command_runner.go:130] ! Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service.
	I0916 11:11:14.120221 1488539 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-654612-m02 minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-654612 minikube.k8s.io/primary=false
	I0916 11:11:14.248261 1488539 command_runner.go:130] > node/multinode-654612-m02 labeled
	I0916 11:11:14.252726 1488539 start.go:319] duration metric: took 2.830250727s to joinCluster
	I0916 11:11:14.252786 1488539 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:11:14.253276 1488539 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:11:14.255757 1488539 out.go:177] * Verifying Kubernetes components...
	I0916 11:11:14.258506 1488539 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:14.357209 1488539 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:14.375013 1488539 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:11:14.375454 1488539 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
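The node_ready wait that follows is implemented as repeated raw GETs against /api/v1/nodes/multinode-654612-m02 until the node reports Ready. An equivalent standalone sketch using client-go, assuming the kubeconfig path shown earlier in this log; illustrative, not minikube's node_ready.go:

    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Same 6m budget as the log; poll the node until its Ready condition is True.
        deadline := time.Now().Add(6 * time.Minute)
        for time.Now().Before(deadline) {
            node, err := cs.CoreV1().Nodes().Get(context.TODO(), "multinode-654612-m02", metav1.GetOptions{})
            if err == nil {
                for _, c := range node.Status.Conditions {
                    if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
                        fmt.Println("node is Ready")
                        return
                    }
                }
            }
            time.Sleep(500 * time.Millisecond)
        }
        fmt.Println("timed out waiting for node to be Ready")
    }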
	I0916 11:11:14.375804 1488539 node_ready.go:35] waiting up to 6m0s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:11:14.375920 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:14.375944 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:14.375965 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.375999 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.378546 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.378570 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:14.378579 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:14.378585 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:14.378591 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.378595 1488539 round_trippers.go:580]     Audit-Id: 6eef9699-3cb4-4b38-8f65-1c0c18ae22b0
	I0916 11:11:14.378598 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.378601 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.378759 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"469","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl":{}}},"f: [truncated 5619 chars]
	I0916 11:11:14.876974 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:14.877001 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:14.877011 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.877016 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.879316 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.879338 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:14.879347 1488539 round_trippers.go:580]     Audit-Id: 1acc090a-2827-44db-bc3f-bc84c4655786
	I0916 11:11:14.879352 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.879355 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.879360 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:14.879364 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:14.879368 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.879570 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:15.376266 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:15.376293 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:15.376303 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.376307 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.379471 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:15.379503 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:15.379511 1488539 round_trippers.go:580]     Audit-Id: 47eb4630-91dd-4529-85cd-9f3e3c025efe
	I0916 11:11:15.379516 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.379520 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.379523 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:15.379527 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:15.379530 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.380873 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:15.876914 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:15.876940 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:15.876950 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.876956 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.879560 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:15.879599 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:15.879608 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.879612 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.879617 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:15.879636 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:15.879643 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.879647 1488539 round_trippers.go:580]     Audit-Id: a0b5b511-eb0f-423b-a821-83b992a05a05
	I0916 11:11:15.879836 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:16.376946 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:16.376966 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:16.376976 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.376980 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.379126 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.379193 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:16.379212 1488539 round_trippers.go:580]     Audit-Id: 6993bcf0-193e-4be6-ab85-876bd53df1ef
	I0916 11:11:16.379231 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.379250 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.379284 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:16.379327 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:16.379344 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.379468 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:16.379859 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
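	(Editor's note: the Ready:"False" verdict above is derived from the NodeReady condition in the node status returned by each ~500ms GET. A hypothetical helper showing that check, not minikube's implementation:)

	// node_ready.go -- sketch: a node counts as Ready once its NodeReady
	// condition reports status True; the polling GETs above wait for this.
	package readiness

	import corev1 "k8s.io/api/core/v1"

	func isNodeReady(node *corev1.Node) bool {
		for _, cond := range node.Status.Conditions {
			if cond.Type == corev1.NodeReady {
				return cond.Status == corev1.ConditionTrue
			}
		}
		return false
	}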
	I0916 11:11:16.876017 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:16.876043 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:16.876050 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.876055 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.878812 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.878935 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:16.878969 1488539 round_trippers.go:580]     Audit-Id: 51e825d9-82d9-4b6d-a74f-9d04bcd0df78
	I0916 11:11:16.878994 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.878999 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.879002 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:16.879005 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:16.879008 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.879183 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:17.376700 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:17.376725 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:17.376735 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.376749 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.379291 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.379313 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:17.379321 1488539 round_trippers.go:580]     Audit-Id: c60528b5-2803-4fbf-b3df-0e00c8218b97
	I0916 11:11:17.379325 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.379329 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.379342 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:17.379347 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:17.379350 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.379656 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:17.876872 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:17.876898 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:17.876909 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.876913 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.879285 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.879310 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:17.879319 1488539 round_trippers.go:580]     Audit-Id: 75b573d1-7f5d-4287-8f70-3ed725b0ca5c
	I0916 11:11:17.879324 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.879329 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.879332 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:17.879337 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:17.879340 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.879724 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.376113 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:18.376140 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:18.376151 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.376155 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.378577 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.378605 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:18.378613 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:18.378617 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.378621 1488539 round_trippers.go:580]     Audit-Id: 7590e0c6-37b6-41e5-960c-b55c4d154b9d
	I0916 11:11:18.378625 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.378628 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.378631 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:18.378934 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.876037 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:18.876059 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:18.876069 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.876074 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.878731 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.878753 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:18.878762 1488539 round_trippers.go:580]     Audit-Id: ff102ec7-0259-49ee-b50a-5a62cb9a308b
	I0916 11:11:18.878768 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.878773 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.878776 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:18.878779 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:18.878781 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.878888 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:18.879287 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:19.376449 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:19.376472 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:19.376482 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.376487 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.378887 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.378910 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:19.378918 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:19.378924 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:19.378927 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.378930 1488539 round_trippers.go:580]     Audit-Id: 8e56b254-8400-489a-b51b-b4fd1909804c
	I0916 11:11:19.378933 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.378936 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.379037 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:19.876264 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:19.876296 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:19.876306 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.876312 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.878640 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.878669 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:19.878678 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.878684 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:19.878688 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:19.878694 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.878697 1488539 round_trippers.go:580]     Audit-Id: e0bbb52e-690a-489d-b2e4-f7abb263b910
	I0916 11:11:19.878702 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.878833 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.377009 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:20.377037 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:20.377047 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.377052 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.379282 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.379306 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:20.379315 1488539 round_trippers.go:580]     Audit-Id: 968a2336-ea50-4a69-b188-92e287e1e61c
	I0916 11:11:20.379336 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.379349 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.379352 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:20.379356 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:20.379366 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.379758 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.876368 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:20.876397 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:20.876406 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.876410 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.879272 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.879303 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:20.879325 1488539 round_trippers.go:580]     Audit-Id: d945c715-b17d-4d14-b534-eaf5a33c9c49
	I0916 11:11:20.879330 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.879333 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.879336 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:20.879339 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:20.879343 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.879761 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:20.880158 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:21.376080 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:21.376102 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:21.376113 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.376117 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.378443 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.378466 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:21.378475 1488539 round_trippers.go:580]     Audit-Id: 1cd3838c-afff-4933-8104-ea7949cebaae
	I0916 11:11:21.378479 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.378482 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.378487 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:21.378490 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:21.378492 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.378887 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:21.876570 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:21.876596 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:21.876606 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.876611 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.878907 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.878933 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:21.878942 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:21.878946 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:21.878950 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.878954 1488539 round_trippers.go:580]     Audit-Id: 4f4a643c-22bf-404d-84ae-3ac4c3c0c7c2
	I0916 11:11:21.878957 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.878960 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.879323 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:22.376445 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:22.376467 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:22.376476 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.376480 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.379357 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.379387 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:22.379396 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.379417 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:22.379421 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:22.379425 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.379432 1488539 round_trippers.go:580]     Audit-Id: 0ba4f08e-ea6d-49c7-aebf-b0c80b21f7bc
	I0916 11:11:22.379435 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.379861 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:22.876494 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:22.876527 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:22.876537 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.876541 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.878793 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.878815 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:22.878823 1488539 round_trippers.go:580]     Audit-Id: 88cd8697-bb9b-4770-a6c9-600bfdf09e38
	I0916 11:11:22.878827 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.878831 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.878836 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:22.878840 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:22.878844 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.878960 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:23.376147 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:23.376171 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:23.376181 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.376184 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.379138 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:23.379162 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:23.379171 1488539 round_trippers.go:580]     Audit-Id: 988f79f3-6497-4095-b28d-beebe5879581
	I0916 11:11:23.379175 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.379178 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.379181 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:23.379184 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:23.379187 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.379810 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"471","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5728 chars]
	I0916 11:11:23.380261 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:23.876514 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:23.876537 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:23.876546 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.876553 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.883023 1488539 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:11:23.883054 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:23.883064 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.883070 1488539 round_trippers.go:580]     Audit-Id: ea9ea0a9-be12-4e01-908a-505e4776f737
	I0916 11:11:23.883073 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.883079 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.883083 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:23.883086 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:23.883270 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:24.376469 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:24.376493 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:24.376503 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.376507 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.378869 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.378897 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:24.378906 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:24.378912 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.378916 1488539 round_trippers.go:580]     Audit-Id: 67e4f910-e530-48fc-977d-48b1d98ed762
	I0916 11:11:24.378918 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.378924 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.378927 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:24.379156 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:24.876824 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:24.876849 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:24.876859 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.876864 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.879124 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.879145 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:24.879153 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.879158 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.879162 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:24.879164 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:24.879167 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.879170 1488539 round_trippers.go:580]     Audit-Id: 5fdb15d8-7711-4ddc-912b-625186708776
	I0916 11:11:24.879336 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:25.376217 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:25.376243 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.376253 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.376259 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.380469 1488539 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:25.380494 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.380502 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.380507 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.380511 1488539 round_trippers.go:580]     Audit-Id: 269ea3da-803e-4aa0-a0a1-a4aa2801e1a8
	I0916 11:11:25.380513 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.380516 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.380520 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.380860 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"492","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6120 chars]
	I0916 11:11:25.381367 1488539 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"False"
	I0916 11:11:25.876751 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:25.876773 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.876783 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.876788 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.879208 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.879233 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.879242 1488539 round_trippers.go:580]     Audit-Id: 1d62600f-fcd0-4736-a2a0-8b49ba7d004a
	I0916 11:11:25.879247 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.879252 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.879255 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.879259 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.879264 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.879606 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"496","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5937 chars]
	I0916 11:11:25.880035 1488539 node_ready.go:49] node "multinode-654612-m02" has status "Ready":"True"
	I0916 11:11:25.880057 1488539 node_ready.go:38] duration metric: took 11.504217601s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:11:25.880068 1488539 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
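	(Editor's note: the "extra waiting" step above polls the kube-system pod list until every system-critical pod reports a PodReady=True condition. A sketch assuming client-go's generic poller; minikube filters by the component labels listed above, while this sketch checks every kube-system pod for brevity. Not minikube's implementation.)

	// pod_ready.go -- sketch of the system-pod wait using
	// wait.PollUntilContextTimeout with the 6m0s budget from the log.
	package readiness

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	func waitSystemPodsReady(cs kubernetes.Interface) error {
		return wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				pods, err := cs.CoreV1().Pods("kube-system").List(ctx, metav1.ListOptions{})
				if err != nil {
					return false, nil // transient API errors: keep polling
				}
				for i := range pods.Items {
					if !isPodReady(&pods.Items[i]) {
						return false, nil
					}
				}
				return true, nil
			})
	}

	func isPodReady(p *corev1.Pod) bool {
		for _, c := range p.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}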
	I0916 11:11:25.880138 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:25.880148 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.880157 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.880163 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.883900 1488539 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:25.883925 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.883940 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.883945 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.883948 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.883951 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.883954 1488539 round_trippers.go:580]     Audit-Id: 8e4d315b-e8a1-4079-8661-d4e87d78f00f
	I0916 11:11:25.883957 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.884651 1488539 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"496"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 74117 chars]
	I0916 11:11:25.888991 1488539 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.889146 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:11:25.889159 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.889170 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.889175 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.891622 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.891690 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.891707 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.891720 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.891723 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.891727 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.891729 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.891732 1488539 round_trippers.go:580]     Audit-Id: d406b4bb-4fc3-41de-a84d-7a350b8ef72b
	I0916 11:11:25.892041 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:11:25.892666 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.892721 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.892731 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.892746 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.894970 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.895003 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.895014 1488539 round_trippers.go:580]     Audit-Id: 340403b6-a361-4554-9399-58215f49305f
	I0916 11:11:25.895024 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.895030 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.895033 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.895036 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.895045 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.895201 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.895706 1488539 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.895729 1488539 pod_ready.go:82] duration metric: took 6.705485ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.895746 1488539 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.895841 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:11:25.895856 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.895867 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.895873 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.898278 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.898306 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.898315 1488539 round_trippers.go:580]     Audit-Id: a100b7ef-e39c-4b9c-a042-fd226c208c0a
	I0916 11:11:25.898330 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.898334 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.898337 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.898340 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.898343 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.898480 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"388","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6435 chars]
	I0916 11:11:25.898988 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.899002 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.899011 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.899017 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.901177 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.901196 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.901205 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.901210 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.901215 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.901217 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.901220 1488539 round_trippers.go:580]     Audit-Id: 2716640e-8a67-4ae7-8934-d85f52790903
	I0916 11:11:25.901223 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.901380 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.901788 1488539 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.901808 1488539 pod_ready.go:82] duration metric: took 6.05111ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.901826 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.901897 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:11:25.901907 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.901914 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.901919 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.903933 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.903960 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.903968 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.903972 1488539 round_trippers.go:580]     Audit-Id: 74b45ee3-ee8b-4ff6-b6ea-7262fd3720e4
	I0916 11:11:25.903975 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.903978 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.903987 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.903994 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.904143 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"386","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8513 chars]
	I0916 11:11:25.904818 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.904837 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.904846 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.904856 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.906893 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.906915 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.906924 1488539 round_trippers.go:580]     Audit-Id: 4e7e6cfa-d066-4c67-a3a4-d3c22480452a
	I0916 11:11:25.906928 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.906931 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.906949 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.906956 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.906959 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.907204 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.907590 1488539 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.907603 1488539 pod_ready.go:82] duration metric: took 5.767014ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.907613 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.907689 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:11:25.907695 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.907703 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.907707 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.909692 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.909710 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.909718 1488539 round_trippers.go:580]     Audit-Id: 84acaced-5ae1-460a-b56d-1e4c3d7f58bf
	I0916 11:11:25.909724 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.909728 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.909732 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.909735 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.909738 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.909880 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"372","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8088 chars]
	I0916 11:11:25.910394 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:25.910405 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:25.910413 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.910419 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.912415 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.912437 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:25.912444 1488539 round_trippers.go:580]     Audit-Id: edaf1560-20c9-42d1-a04d-9566b906e42a
	I0916 11:11:25.912450 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.912457 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.912460 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:25.912469 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:25.912472 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.912592 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:25.913087 1488539 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:25.913107 1488539 pod_ready.go:82] duration metric: took 5.485659ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:25.913119 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.077520 1488539 request.go:632] Waited for 164.320653ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:11:26.077615 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:11:26.077628 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.077641 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.077648 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.080335 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.080407 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.080430 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.080450 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.080484 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.080508 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.080527 1488539 round_trippers.go:580]     Audit-Id: e2e31149-de87-4b13-b79c-15dd39017298
	I0916 11:11:26.080563 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.080763 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"480","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:11:26.277518 1488539 request.go:632] Waited for 196.225146ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:26.277597 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:11:26.277607 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.277616 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.277620 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.279591 1488539 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:26.279617 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.279625 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.279629 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.279634 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.279674 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.279682 1488539 round_trippers.go:580]     Audit-Id: 6d4ec83f-1894-4ad0-a510-40c4043c2092
	I0916 11:11:26.279685 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.280083 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"496","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5937 chars]
	I0916 11:11:26.280532 1488539 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:26.280552 1488539 pod_ready.go:82] duration metric: took 367.419527ms for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.280563 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.477565 1488539 request.go:632] Waited for 196.907722ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:11:26.477633 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:11:26.477644 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.477653 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.477667 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.479978 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.480086 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.480126 1488539 round_trippers.go:580]     Audit-Id: 3286276e-0dd1-4467-a26f-7d35bed0fe93
	I0916 11:11:26.480153 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.480163 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.480168 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.480172 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.480176 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.480319 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"381","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:11:26.677094 1488539 request.go:632] Waited for 196.215867ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:26.677175 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:26.677183 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.677191 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.677199 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.679546 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.679577 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.679592 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.679597 1488539 round_trippers.go:580]     Audit-Id: 0d82fd4c-11eb-4cad-81bc-5ee2099948bf
	I0916 11:11:26.679600 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.679603 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.679606 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.679610 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.679732 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:26.680176 1488539 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:26.680196 1488539 pod_ready.go:82] duration metric: took 399.618315ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.680207 1488539 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:26.877625 1488539 request.go:632] Waited for 197.329316ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:11:26.877693 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:11:26.877702 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:26.877711 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.877725 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.880271 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.880293 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:26.880302 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.880306 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.880309 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:26.880313 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:26.880316 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.880327 1488539 round_trippers.go:580]     Audit-Id: f048178d-d03b-4e9f-b1c8-d59ea03d7ecd
	I0916 11:11:26.880440 1488539 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"380","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4970 chars]
	I0916 11:11:27.077367 1488539 request.go:632] Waited for 196.389901ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:27.077432 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:11:27.077439 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:27.077449 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.077460 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.079936 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.079999 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:27.080008 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:27.080012 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:27.080024 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.080029 1488539 round_trippers.go:580]     Audit-Id: 62241b97-8695-473c-bb03-f7912c98aeae
	I0916 11:11:27.080032 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.080035 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.080134 1488539 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6029 chars]
	I0916 11:11:27.080598 1488539 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:27.080617 1488539 pod_ready.go:82] duration metric: took 400.402526ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:27.080628 1488539 pod_ready.go:39] duration metric: took 1.200545275s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:27.080646 1488539 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:11:27.080725 1488539 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:27.093012 1488539 system_svc.go:56] duration metric: took 12.356868ms WaitForService to wait for kubelet
	I0916 11:11:27.093042 1488539 kubeadm.go:582] duration metric: took 12.840232566s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:11:27.093061 1488539 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:27.277447 1488539 request.go:632] Waited for 184.307636ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:27.277523 1488539 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:11:27.277542 1488539 round_trippers.go:469] Request Headers:
	I0916 11:11:27.277554 1488539 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.277561 1488539 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.280031 1488539 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.280147 1488539 round_trippers.go:577] Response Headers:
	I0916 11:11:27.280161 1488539 round_trippers.go:580]     Audit-Id: 0f62021e-0ed4-4157-a99d-5bac6c161952
	I0916 11:11:27.280166 1488539 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.280169 1488539 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.280174 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:11:27.280177 1488539 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:11:27.280180 1488539 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.280399 1488539 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"496"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"401","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields
":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 13011 chars]
	I0916 11:11:27.281202 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.281232 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.281244 1488539 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.281249 1488539 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.281254 1488539 node_conditions.go:105] duration metric: took 188.187762ms to run NodePressure ...
	I0916 11:11:27.281268 1488539 start.go:241] waiting for startup goroutines ...
	I0916 11:11:27.281297 1488539 start.go:255] writing updated cluster config ...
	I0916 11:11:27.281640 1488539 ssh_runner.go:195] Run: rm -f paused
	I0916 11:11:27.290662 1488539 out.go:177] * Done! kubectl is now configured to use "multinode-654612" cluster and "default" namespace by default
	E0916 11:11:27.293311 1488539 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
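The "exec format error" in the line above means the kernel refused to exec /usr/local/bin/kubectl because its machine type does not match this arm64 host (an amd64 kubectl on an arm64 runner produces exactly this error). A minimal Go sketch, not part of the test run, that surfaces the mismatch by reading the binary's ELF header; the path is taken from the failing log line:

package main

import (
	"debug/elf"
	"fmt"
	"runtime"
)

func main() {
	// The failing path from the log; "exec format error" means the kernel
	// rejected this file's machine type for the current architecture.
	f, err := elf.Open("/usr/local/bin/kubectl")
	if err != nil {
		fmt.Println("cannot parse as ELF:", err)
		return
	}
	defer f.Close()
	fmt.Printf("binary machine: %v, host arch: %s\n", f.Machine, runtime.GOARCH)
	if f.Machine != elf.EM_AARCH64 {
		// e.g. EM_X86_64: a binary built for amd64 cannot run on this host.
		fmt.Println("architecture mismatch: binary cannot run on this host")
	}
}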
	
	
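The pod_ready.go / round_trippers.go traffic above all follows one polling pattern: GET a system pod, check its Ready condition, GET its node, repeat; the "Waited for … due to client-side throttling, not priority and fairness" lines are client-go's default token-bucket limiter (QPS=5, Burst=10) pacing those requests. A minimal client-go sketch of that loop, assuming the kubeconfig this run just wrote; the pod name and namespace are taken from the log:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True, the same
// check behind the pod_ready.go "has status \"Ready\":\"True\"" lines.
func podReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	// client-go defaults: QPS=5, Burst=10 — the source of the ~160-200ms
	// client-side throttling waits logged above when polling quickly.
	cfg.QPS = 5
	cfg.Burst = 10
	cs := kubernetes.NewForConfigOrDie(cfg)

	ctx, cancel := context.WithTimeout(context.Background(), 6*time.Minute)
	defer cancel()
	for {
		p, err := cs.CoreV1().Pods("kube-system").Get(ctx,
			"kube-scheduler-multinode-654612", metav1.GetOptions{})
		if err == nil && podReady(p) {
			fmt.Println("pod Ready")
			return
		}
		time.Sleep(2 * time.Second)
	}
}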
	==> CRI-O <==
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.707094730Z" level=info msg="Created container f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51: kube-system/coredns-7c65d6cfc9-szvv9/coredns" id=27a44a9a-eb3a-416e-b2cb-c51a2e28306c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.707959505Z" level=info msg="Starting container: f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51" id=1f2a4b0f-7b4a-4688-a5d2-7e39de4df255 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:10:58 multinode-654612 crio[972]: time="2024-09-16 11:10:58.717686792Z" level=info msg="Started container" PID=2034 containerID=f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51 description=kube-system/coredns-7c65d6cfc9-szvv9/coredns id=1f2a4b0f-7b4a-4688-a5d2-7e39de4df255 name=/runtime.v1.RuntimeService/StartContainer sandboxID=3c6d4cc55a80f4d37dca4b29803a144965d8762aa6f909ee7e5ece0519a03e2d
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.397099824Z" level=info msg="Running pod sandbox: default/busybox-7dff88458-rdtjw/POD" id=eb76a363-6add-42ae-a335-f67ab64212ac name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.397169516Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.421494831Z" level=info msg="Got pod network &{Name:busybox-7dff88458-rdtjw Namespace:default ID:4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 UID:39d79774-0a74-4464-a7fe-d312a92e8749 NetNS:/var/run/netns/ea67f675-e985-437a-ada1-08d17099372f Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.421535412Z" level=info msg="Adding pod default_busybox-7dff88458-rdtjw to CNI network \"kindnet\" (type=ptp)"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.430956721Z" level=info msg="Got pod network &{Name:busybox-7dff88458-rdtjw Namespace:default ID:4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 UID:39d79774-0a74-4464-a7fe-d312a92e8749 NetNS:/var/run/netns/ea67f675-e985-437a-ada1-08d17099372f Networks:[] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.431107536Z" level=info msg="Checking pod default_busybox-7dff88458-rdtjw for CNI network kindnet (type=ptp)"
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.434199899Z" level=info msg="Ran pod sandbox 4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414 with infra container: default/busybox-7dff88458-rdtjw/POD" id=eb76a363-6add-42ae-a335-f67ab64212ac name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.435333541Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=e504711b-1b9c-4895-85b5-5984f4f6c966 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.435540141Z" level=info msg="Image gcr.io/k8s-minikube/busybox:1.28 not found" id=e504711b-1b9c-4895-85b5-5984f4f6c966 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.436980229Z" level=info msg="Pulling image: gcr.io/k8s-minikube/busybox:1.28" id=de9dde56-8541-4b4b-9aab-d772372f4595 name=/runtime.v1.ImageService/PullImage
	Sep 16 11:11:28 multinode-654612 crio[972]: time="2024-09-16 11:11:28.438347835Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:11:29 multinode-654612 crio[972]: time="2024-09-16 11:11:29.711560207Z" level=info msg="Trying to access \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.747055733Z" level=info msg="Pulled image: gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3" id=de9dde56-8541-4b4b-9aab-d772372f4595 name=/runtime.v1.ImageService/PullImage
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.747857182Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=fdea80e0-8c7b-4064-9e0c-3aa24a9c23a8 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.748574391Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=fdea80e0-8c7b-4064-9e0c-3aa24a9c23a8 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.750907159Z" level=info msg="Checking image status: gcr.io/k8s-minikube/busybox:1.28" id=3e750e05-3736-48da-8bf3-a21449779615 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.752331550Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd,RepoTags:[gcr.io/k8s-minikube/busybox:1.28],RepoDigests:[gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3 gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12],Size_:1496796,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=3e750e05-3736-48da-8bf3-a21449779615 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.753437288Z" level=info msg="Creating container: default/busybox-7dff88458-rdtjw/busybox" id=6a5c5c9a-e338-4a50-8a11-a4dd364a425d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.753534844Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.807962628Z" level=info msg="Created container c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc: default/busybox-7dff88458-rdtjw/busybox" id=6a5c5c9a-e338-4a50-8a11-a4dd364a425d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.808503292Z" level=info msg="Starting container: c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc" id=8de8909e-6a59-4241-8695-89643b731ce1 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:11:31 multinode-654612 crio[972]: time="2024-09-16 11:11:31.817273531Z" level=info msg="Started container" PID=2145 containerID=c2520810d50a78569a057f793a6598a2405c3f4f21742485e9bcb7daf22c1ebc description=default/busybox-7dff88458-rdtjw/busybox id=8de8909e-6a59-4241-8695-89643b731ce1 name=/runtime.v1.RuntimeService/StartContainer sandboxID=4ef152fa69638e911923a9267d3d8efc9c1242343e215b70a8dac70cafd91414
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	c2520810d50a7       gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3   59 seconds ago       Running             busybox                   0                   4ef152fa69638       busybox-7dff88458-rdtjw
	f1f1cadfeb97f       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4                                      About a minute ago   Running             coredns                   0                   3c6d4cc55a80f       coredns-7c65d6cfc9-szvv9
	88a12fcbc6eb5       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6                                      About a minute ago   Running             storage-provisioner       0                   bcd5c2b8fce3c       storage-provisioner
	c2386662da70d       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d                                      2 minutes ago        Running             kube-proxy                0                   e8daba284c881       kube-proxy-t9pzq
	9af0dfbb5d2f2       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51                                      2 minutes ago        Running             kindnet-cni               0                   bbb8bb7e5ac7a       kindnet-whjqt
	5d34b90bf3c2a       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e                                      2 minutes ago        Running             kube-controller-manager   0                   d431cd82769b7       kube-controller-manager-multinode-654612
	7e4a553dd98fc       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d                                      2 minutes ago        Running             kube-scheduler            0                   76a6e90530729       kube-scheduler-multinode-654612
	00cc927e5dcd6       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853                                      2 minutes ago        Running             kube-apiserver            0                   5fff8c0ab15f3       kube-apiserver-multinode-654612
	c65aacc72d266       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da                                      2 minutes ago        Running             etcd                      0                   fa4d67791d874       etcd-multinode-654612
	
	
	==> coredns [f1f1cadfeb97f3dab9d88820dff0a395485906647e023bda62d5fa4b8fe29f51] <==
	[INFO] 10.244.0.3:35946 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00011523s
	[INFO] 10.244.2.2:51440 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000129236s
	[INFO] 10.244.2.2:58230 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001142438s
	[INFO] 10.244.2.2:37607 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000091895s
	[INFO] 10.244.2.2:59551 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000071325s
	[INFO] 10.244.2.2:54758 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001095875s
	[INFO] 10.244.2.2:45539 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000145334s
	[INFO] 10.244.2.2:60301 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000089524s
	[INFO] 10.244.2.2:50456 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000124509s
	[INFO] 10.244.0.3:39906 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000180779s
	[INFO] 10.244.0.3:56779 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000089704s
	[INFO] 10.244.0.3:52775 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000088252s
	[INFO] 10.244.0.3:35755 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000078915s
	[INFO] 10.244.2.2:41846 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000114139s
	[INFO] 10.244.2.2:51082 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.00011674s
	[INFO] 10.244.2.2:43718 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000079989s
	[INFO] 10.244.2.2:40056 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000092609s
	[INFO] 10.244.0.3:47718 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150273s
	[INFO] 10.244.0.3:56808 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000175955s
	[INFO] 10.244.0.3:52572 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000140033s
	[INFO] 10.244.0.3:48292 - 5 "PTR IN 1.67.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000144661s
	[INFO] 10.244.2.2:40796 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000146483s
	[INFO] 10.244.2.2:49060 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000089253s
	[INFO] 10.244.2.2:43910 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000064646s
	[INFO] 10.244.2.2:45764 - 5 "PTR IN 1.67.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000085364s
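The queries above are ordinary resolver traffic from the test pods (10.244.0.3 and 10.244.2.2) walking the cluster search path: the short name "kubernetes.default" and its first search expansion return NXDOMAIN, and "kubernetes.default.svc.cluster.local" resolves with NOERROR. A trivial in-pod sketch that generates the same A/AAAA lookups; run outside the cluster it will simply fail:

package main

import (
	"fmt"
	"net"
)

func main() {
	// Inside a pod, /etc/resolv.conf points at the cluster DNS service and
	// the search path expands short names, producing the query pairs above.
	for _, name := range []string{
		"kubernetes.default",
		"kubernetes.default.svc.cluster.local",
	} {
		addrs, err := net.LookupHost(name)
		fmt.Printf("%s -> %v err=%v\n", name, addrs, err)
	}
}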
	
	
	==> describe nodes <==
	Name:               multinode-654612
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:10:07 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:22 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:11:42 +0000   Mon, 16 Sep 2024 11:10:56 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.2
	  Hostname:    multinode-654612
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 eae974bfbdcf45cc93be3557efd7b033
	  System UUID:                b0403d6b-24c6-42eb-8273-193a1e97b1c8
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-rdtjw                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         63s
	  kube-system                 coredns-7c65d6cfc9-szvv9                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     2m16s
	  kube-system                 etcd-multinode-654612                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m21s
	  kube-system                 kindnet-whjqt                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m17s
	  kube-system                 kube-apiserver-multinode-654612             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m21s
	  kube-system                 kube-controller-manager-multinode-654612    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m21s
	  kube-system                 kube-proxy-t9pzq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m17s
	  kube-system                 kube-scheduler-multinode-654612             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m21s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m16s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age    From             Message
	  ----     ------                   ----   ----             -------
	  Normal   Starting                 2m15s  kube-proxy       
	  Normal   Starting                 2m21s  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m21s  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m21s  kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m21s  kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m21s  kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m17s  node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	  Normal   CIDRAssignmentFailed     2m17s  cidrAllocator    Node multinode-654612 status is now: CIDRAssignmentFailed
	  Normal   NodeReady                95s    kubelet          Node multinode-654612 status is now: NodeReady
	
	
	Name:               multinode-654612-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:13 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:24 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:11:44 +0000   Mon, 16 Sep 2024 11:11:25 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.3
	  Hostname:    multinode-654612-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 a4b1086ea37f43a28c08df81961f6812
	  System UUID:                9e565e5c-62ec-45b7-a6b4-8e158afd85b2
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-sfkxt    0 (0%)        0 (0%)      0 (0%)           0 (0%)         63s
	  kube-system                 kindnet-687xg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      78s
	  kube-system                 kube-proxy-gf2tw           0 (0%)        0 (0%)      0 (0%)           0 (0%)         78s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 77s                kube-proxy       
	  Warning  CgroupV1                 78s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  78s (x2 over 78s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    78s (x2 over 78s)  kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     78s (x2 over 78s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           77s                node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeReady                66s                kubelet          Node multinode-654612-m02 status is now: NodeReady
	
	
	Name:               multinode-654612-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_49_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:48 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:09 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:11:48 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:12:01 +0000   Mon, 16 Sep 2024 11:12:01 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.4
	  Hostname:    multinode-654612-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 8323217d2d004891992d40b8b53ad006
	  System UUID:                54271cc2-86f8-4253-83a9-e426ec1746ce
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-ncfhl       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      43s
	  kube-system                 kube-proxy-vf648    0 (0%)        0 (0%)      0 (0%)           0 (0%)         43s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 40s                kube-proxy       
	  Normal  NodeHasSufficientMemory  43s (x2 over 43s)  kubelet          Node multinode-654612-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    43s (x2 over 43s)  kubelet          Node multinode-654612-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     43s (x2 over 43s)  kubelet          Node multinode-654612-m03 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           42s                node-controller  Node multinode-654612-m03 event: Registered Node multinode-654612-m03 in Controller
	  Normal  NodeReady                30s                kubelet          Node multinode-654612-m03 status is now: NodeReady
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [c65aacc72d2663a923ab63ffa1649959e0e5fe51df61d0c8285d08becdf417a3] <==
	{"level":"info","ts":"2024-09-16T11:10:03.898258Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:10:03.898427Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:10:03.898514Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"8688e899f7831fc7","initial-advertise-peer-urls":["https://192.168.67.2:2380"],"listen-peer-urls":["https://192.168.67.2:2380"],"advertise-client-urls":["https://192.168.67.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.67.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:10:03.898539Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:10:04.631232Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631434Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631547Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgPreVoteResp from 8688e899f7831fc7 at term 1"}
	{"level":"info","ts":"2024-09-16T11:10:04.631589Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631642Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgVoteResp from 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631699Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.631763Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 8688e899f7831fc7 elected leader 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2024-09-16T11:10:04.636890Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"8688e899f7831fc7","local-member-attributes":"{Name:multinode-654612 ClientURLs:[https://192.168.67.2:2379]}","request-path":"/0/members/8688e899f7831fc7/attributes","cluster-id":"9d8fdeb88b6def78","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:10:04.637094Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.638745Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:10:04.639216Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:10:04.640138Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:10:04.641356Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.67.2:2379"}
	{"level":"info","ts":"2024-09-16T11:10:04.642224Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:10:04.643322Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:10:04.644711Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:10:04.644780Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:10:04.644861Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"9d8fdeb88b6def78","local-member-id":"8688e899f7831fc7","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.644956Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:10:04.645009Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:11:39.551153Z","caller":"traceutil/trace.go:171","msg":"trace[43988492] transaction","detail":"{read_only:false; response_revision:542; number_of_response:1; }","duration":"100.767855ms","start":"2024-09-16T11:11:39.450367Z","end":"2024-09-16T11:11:39.551134Z","steps":["trace[43988492] 'process raft request'  (duration: 100.609417ms)"],"step_count":1}
	
	
	==> kernel <==
	 11:12:32 up 10:54,  0 users,  load average: 2.03, 2.71, 2.72
	Linux multinode-654612 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [9af0dfbb5d2f2d8bccd103497822725f259c42a87b482311032dd374224dc861] <==
	I0916 11:11:56.639195       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:11:56.639231       1 main.go:299] handling current node
	I0916 11:11:56.639255       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:11:56.639261       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:11:56.639429       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:11:56.639446       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:11:56.639513       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.3.0/24 Src: <nil> Gw: 192.168.67.4 Flags: [] Table: 0} 
	I0916 11:12:06.635209       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:12:06.635245       1 main.go:299] handling current node
	I0916 11:12:06.635262       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:12:06.635269       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:12:06.635428       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:12:06.635445       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:12:16.635838       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:12:16.635955       1 main.go:299] handling current node
	I0916 11:12:16.636010       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:12:16.636062       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:12:16.636207       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:12:16.636244       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:12:26.640771       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:12:26.640880       1 main.go:299] handling current node
	I0916 11:12:26.640904       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:12:26.640912       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:12:26.641060       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:12:26.641079       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	
	
	==> kube-apiserver [00cc927e5dcd6bba6542407e57d794c19b8cdd3c3a3e876481e838d3d1bb32ca] <==
	I0916 11:10:07.969847       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:10:08.464735       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:10:08.470271       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:10:08.470301       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:10:09.126362       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:10:09.184879       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:10:09.269996       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:10:09.277769       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.67.2]
	I0916 11:10:09.278883       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:10:09.284228       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:10:09.675905       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:10:10.271952       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:10:10.287877       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:10:10.307423       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:10:14.930615       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0916 11:10:15.641848       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0916 11:11:33.130242       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48710: use of closed network connection
	E0916 11:11:33.355844       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48724: use of closed network connection
	E0916 11:11:33.570301       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48748: use of closed network connection
	E0916 11:11:33.990711       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48782: use of closed network connection
	E0916 11:11:34.198507       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48794: use of closed network connection
	E0916 11:11:34.549020       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48824: use of closed network connection
	E0916 11:11:34.768739       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48848: use of closed network connection
	E0916 11:11:34.972991       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48868: use of closed network connection
	E0916 11:11:35.194855       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:48886: use of closed network connection
	
	
	==> kube-controller-manager [5d34b90bf3c2a4fe15bdb23da3582f1eda44e32048621175218fd569c86a37e4] <==
	I0916 11:11:28.080583       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="62.160108ms"
	I0916 11:11:28.113164       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="32.517221ms"
	I0916 11:11:28.149191       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="35.973293ms"
	I0916 11:11:28.149317       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="70.964µs"
	I0916 11:11:29.446716       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:11:31.413475       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="9.285853ms"
	I0916 11:11:31.414015       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="67.576µs"
	I0916 11:11:32.445448       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="6.381424ms"
	I0916 11:11:32.445537       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="43.068µs"
	I0916 11:11:42.095090       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612"
	I0916 11:11:44.122886       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:11:48.783812       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	I0916 11:11:48.783977       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-654612-m03\" does not exist"
	I0916 11:11:48.820854       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-654612-m03" podCIDRs=["10.244.3.0/24"]
	I0916 11:11:48.820891       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:48.820914       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:48.945125       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:49.263718       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:49.448971       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-654612-m03"
	I0916 11:11:49.514110       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:11:58.976456       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:01.782972       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	I0916 11:12:01.783032       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:01.798232       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:12:04.469038       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	
	
	==> kube-proxy [c2386662da70d7e3af63aeaec77233018972fa9dd01dd520da9d367adb678c73] <==
	I0916 11:10:16.501243       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:10:16.684816       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.67.2"]
	E0916 11:10:16.684888       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:10:16.703638       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:10:16.703703       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:10:16.705525       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:10:16.705846       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:10:16.705867       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:10:16.707680       1 config.go:199] "Starting service config controller"
	I0916 11:10:16.707780       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:10:16.708010       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:10:16.708046       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:10:16.709387       1 config.go:328] "Starting node config controller"
	I0916 11:10:16.709452       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:10:16.808466       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:10:16.808483       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:10:16.809681       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [7e4a553dd98fcbff4d79c739829ae001409f265cf2c56dad37fb1e8c0a8ec53f] <==
	W0916 11:10:07.742823       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:07.742864       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743388       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:10:07.743480       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743615       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:07.743675       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.743717       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 11:10:07.743754       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 11:10:07.743805       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:10:07.743848       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:07.745181       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:10:07.745214       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.604869       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 11:10:08.604927       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.705130       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 11:10:08.705179       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 11:10:08.713648       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 11:10:08.713691       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.735570       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:10:08.735734       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.874688       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:10:08.874727       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:10:08.904363       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:10:08.904411       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:10:10.824337       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:10:58 multinode-654612 kubelet[1533]: E0916 11:10:58.054295    1533 configmap.go:193] Couldn't get configMap kube-system/coredns: failed to sync configmap cache: timed out waiting for the condition
	Sep 16 11:10:58 multinode-654612 kubelet[1533]: E0916 11:10:58.054424    1533 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-config-volume podName:26df8cd4-36bc-49e1-98bf-9c30f5555b7b nodeName:}" failed. No retries permitted until 2024-09-16 11:10:58.554400655 +0000 UTC m=+48.507549149 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/26df8cd4-36bc-49e1-98bf-9c30f5555b7b-config-volume") pod "coredns-7c65d6cfc9-szvv9" (UID: "26df8cd4-36bc-49e1-98bf-9c30f5555b7b") : failed to sync configmap cache: timed out waiting for the condition
	Sep 16 11:10:59 multinode-654612 kubelet[1533]: I0916 11:10:59.374126    1533 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=44.374105486 podStartE2EDuration="44.374105486s" podCreationTimestamp="2024-09-16 11:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:10:57.369315089 +0000 UTC m=+47.322463599" watchObservedRunningTime="2024-09-16 11:10:59.374105486 +0000 UTC m=+49.327253980"
	Sep 16 11:10:59 multinode-654612 kubelet[1533]: I0916 11:10:59.394335    1533 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-szvv9" podStartSLOduration=44.394313568 podStartE2EDuration="44.394313568s" podCreationTimestamp="2024-09-16 11:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:10:59.374733475 +0000 UTC m=+49.327881969" watchObservedRunningTime="2024-09-16 11:10:59.394313568 +0000 UTC m=+49.347462070"
	Sep 16 11:11:00 multinode-654612 kubelet[1533]: E0916 11:11:00.285056    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485060284637473,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:00 multinode-654612 kubelet[1533]: E0916 11:11:00.285554    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485060284637473,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:10 multinode-654612 kubelet[1533]: E0916 11:11:10.286751    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485070286573355,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:10 multinode-654612 kubelet[1533]: E0916 11:11:10.286796    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485070286573355,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:20 multinode-654612 kubelet[1533]: E0916 11:11:20.287771    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485080287591079,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:20 multinode-654612 kubelet[1533]: E0916 11:11:20.287807    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485080287591079,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:28 multinode-654612 kubelet[1533]: I0916 11:11:28.253017    1533 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqwgq\" (UniqueName: \"kubernetes.io/projected/39d79774-0a74-4464-a7fe-d312a92e8749-kube-api-access-zqwgq\") pod \"busybox-7dff88458-rdtjw\" (UID: \"39d79774-0a74-4464-a7fe-d312a92e8749\") " pod="default/busybox-7dff88458-rdtjw"
	Sep 16 11:11:30 multinode-654612 kubelet[1533]: E0916 11:11:30.289088    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485090288882050,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:30 multinode-654612 kubelet[1533]: E0916 11:11:30.289126    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485090288882050,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:125700,},InodesUsed:&UInt64Value{Value:57,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:40 multinode-654612 kubelet[1533]: E0916 11:11:40.290465    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485100290254974,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:40 multinode-654612 kubelet[1533]: E0916 11:11:40.290505    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485100290254974,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:50 multinode-654612 kubelet[1533]: E0916 11:11:50.292597    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485110292320307,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:11:50 multinode-654612 kubelet[1533]: E0916 11:11:50.292641    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485110292320307,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:00 multinode-654612 kubelet[1533]: E0916 11:12:00.307720    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485120304192586,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:00 multinode-654612 kubelet[1533]: E0916 11:12:00.308246    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485120304192586,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:10 multinode-654612 kubelet[1533]: E0916 11:12:10.309999    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485130309796456,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:10 multinode-654612 kubelet[1533]: E0916 11:12:10.310035    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485130309796456,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:20 multinode-654612 kubelet[1533]: E0916 11:12:20.311760    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485140311493477,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:20 multinode-654612 kubelet[1533]: E0916 11:12:20.311799    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485140311493477,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:30 multinode-654612 kubelet[1533]: E0916 11:12:30.324345    1533 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485150324115208,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:12:30 multinode-654612 kubelet[1533]: E0916 11:12:30.324381    1533 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485150324115208,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-654612 -n multinode-654612
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (525.78µs)
helpers_test.go:263: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/StartAfterStop (13.10s)
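
Note on the repeated "fork/exec /usr/local/bin/kubectl: exec format error": on Linux this is execve failing with ENOEXEC, the classic signature of a binary built for a different CPU architecture than the arm64 host (e.g. an amd64 kubectl). A minimal Go sketch of that check follows; the kubectl path is taken from the failure message above, everything else is illustrative and not part of the test harness.

	package main

	// A minimal sketch (assumption: the failing binary is an ELF executable):
	// compare the ELF machine type of a binary with the architecture of the
	// running host. A mismatch is what makes execve fail with ENOEXEC
	// ("exec format error").

	import (
		"debug/elf"
		"fmt"
		"runtime"
	)

	func main() {
		// Path taken from the failure message in the log; any binary works here.
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			fmt.Println("not a readable ELF binary:", err)
			return
		}
		defer f.Close()
		// On this arm64 host a runnable binary reports EM_AARCH64; an amd64
		// build reports EM_X86_64 and cannot be executed.
		fmt.Printf("binary machine type: %v, host GOARCH: %s\n", f.Machine, runtime.GOARCH)
	}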

TestMultiNode/serial/DeleteNode (9.69s)

=== RUN   TestMultiNode/serial/DeleteNode
multinode_test.go:416: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 node delete m03
multinode_test.go:416: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 node delete m03: (4.816470233s)
multinode_test.go:422: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
multinode_test.go:436: (dbg) Run:  kubectl get nodes
multinode_test.go:436: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (485.15µs)
multinode_test.go:438: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/DeleteNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-654612
helpers_test.go:235: (dbg) docker inspect multinode-654612:

-- stdout --
	[
	    {
	        "Id": "402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd",
	        "Created": "2024-09-16T11:09:45.282229543Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1501662,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:12:58.862880716Z",
	            "FinishedAt": "2024-09-16T11:12:58.03879029Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hostname",
	        "HostsPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hosts",
	        "LogPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd-json.log",
	        "Name": "/multinode-654612",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-654612:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-654612",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/merged",
	                "UpperDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/diff",
	                "WorkDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-654612",
	                "Source": "/var/lib/docker/volumes/multinode-654612/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-654612",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-654612",
	                "name.minikube.sigs.k8s.io": "multinode-654612",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "69f49cc754d7be2ace6cd2dd24f7d518b32e857f3840cb21dbd9b88b81ada3e0",
	            "SandboxKey": "/var/run/docker/netns/69f49cc754d7",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34758"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34759"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34762"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34760"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34761"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-654612": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.67.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:43:02",
	                    "DriverOpts": null,
	                    "NetworkID": "76703dbf7b5c303b888ff80e924d3dab5e1ece3140da60ee94903d5d35e68013",
	                    "EndpointID": "52b1249f874bfdda44338e22c73893ed84d8424ebce8e39c26664151191e5cfa",
	                    "Gateway": "192.168.67.1",
	                    "IPAddress": "192.168.67.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-654612",
	                        "402497514f0b"
	                    ]
	                }
	            }
	        }
	    }
	]
-- /stdout --
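Note: the full inspect JSON above is captured for the post-mortem; when the harness only needs one field it uses a Go template instead, as in the later `docker container inspect multinode-654612 --format={{.State.Status}}` calls in this log. A minimal sketch of the same single-field query from Go (illustrative; assumes a local Docker daemon and the container name above):

	package main

	import (
		"fmt"
		"log"
		"os/exec"
	)

	func main() {
		// Ask the daemon for one field rather than parsing the whole inspect blob.
		out, err := exec.Command("docker", "container", "inspect",
			"multinode-654612", "--format", "{{.State.Status}}").Output()
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("state: %s", out) // prints "running" for the container above
	}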
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-654612 -n multinode-654612
helpers_test.go:244: <<< TestMultiNode/serial/DeleteNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/DeleteNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 logs -n 25: (2.013927975s)
helpers_test.go:252: TestMultiNode/serial/DeleteNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m02.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m02_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m03 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp testdata/cp-test.txt                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m03_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02:/home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m02 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-654612 node stop m03                                                          | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	| node    | multinode-654612 node start                                                             | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	| node    | list -p multinode-654612                                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC |                     |
	| stop    | -p multinode-654612                                                                     | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	| start   | -p multinode-654612                                                                     | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:14 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	| node    | list -p multinode-654612                                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:14 UTC |                     |
	| node    | multinode-654612 node delete                                                            | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:14 UTC | 16 Sep 24 11:14 UTC |
	|         | m03                                                                                     |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:12:58
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:12:58.373665 1501462 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:12:58.373869 1501462 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:58.373882 1501462 out.go:358] Setting ErrFile to fd 2...
	I0916 11:12:58.373887 1501462 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:58.374164 1501462 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:12:58.374566 1501462 out.go:352] Setting JSON to false
	I0916 11:12:58.375508 1501462 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":39324,"bootTime":1726445855,"procs":175,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:12:58.375586 1501462 start.go:139] virtualization:  
	I0916 11:12:58.378811 1501462 out.go:177] * [multinode-654612] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:12:58.382298 1501462 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:12:58.382507 1501462 notify.go:220] Checking for updates...
	I0916 11:12:58.387635 1501462 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:12:58.390798 1501462 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:12:58.393551 1501462 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:12:58.396197 1501462 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:12:58.398953 1501462 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:12:58.403560 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:12:58.403668 1501462 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:12:58.426194 1501462 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:12:58.426332 1501462 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:12:58.486079 1501462 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:3 ContainersRunning:0 ContainersPaused:0 ContainersStopped:3 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:12:58.476374915 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:12:58.486226 1501462 docker.go:318] overlay module found
	I0916 11:12:58.489036 1501462 out.go:177] * Using the docker driver based on existing profile
	I0916 11:12:58.491518 1501462 start.go:297] selected driver: docker
	I0916 11:12:58.491541 1501462 start.go:901] validating driver "docker" against &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true} {Name:m03 IP:192.168.67.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:12:58.491690 1501462 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:12:58.491825 1501462 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:12:58.545751 1501462 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:3 ContainersRunning:0 ContainersPaused:0 ContainersStopped:3 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:12:58.536316389 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:12:58.546167 1501462 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:12:58.546207 1501462 cni.go:84] Creating CNI manager for ""
	I0916 11:12:58.546264 1501462 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:12:58.546320 1501462 start.go:340] cluster config:
	{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true} {Name:m03 IP:192.168.67.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:12:58.549242 1501462 out.go:177] * Starting "multinode-654612" primary control-plane node in "multinode-654612" cluster
	I0916 11:12:58.551639 1501462 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:12:58.554229 1501462 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:12:58.556977 1501462 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:12:58.557039 1501462 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:12:58.557052 1501462 cache.go:56] Caching tarball of preloaded images
	I0916 11:12:58.557082 1501462 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:12:58.557155 1501462 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:12:58.557170 1501462 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:12:58.557331 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:12:58.577004 1501462 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:12:58.577028 1501462 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:12:58.577122 1501462 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:12:58.577148 1501462 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:12:58.577154 1501462 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:12:58.577163 1501462 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:12:58.577172 1501462 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:12:58.578849 1501462 image.go:273] response: 
	I0916 11:12:58.700084 1501462 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:12:58.700127 1501462 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:12:58.700174 1501462 start.go:360] acquireMachinesLock for multinode-654612: {Name:mkfbf36af9c510d3c0697cdadc867dcd6648c047 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:12:58.700246 1501462 start.go:364] duration metric: took 45.513µs to acquireMachinesLock for "multinode-654612"
	I0916 11:12:58.700272 1501462 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:12:58.700280 1501462 fix.go:54] fixHost starting: 
	I0916 11:12:58.700563 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:12:58.717879 1501462 fix.go:112] recreateIfNeeded on multinode-654612: state=Stopped err=<nil>
	W0916 11:12:58.717930 1501462 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:12:58.720907 1501462 out.go:177] * Restarting existing docker container for "multinode-654612" ...
	I0916 11:12:58.723535 1501462 cli_runner.go:164] Run: docker start multinode-654612
	I0916 11:12:59.036864 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:12:59.061473 1501462 kic.go:430] container "multinode-654612" state is running.
	I0916 11:12:59.061874 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:12:59.085301 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:12:59.086156 1501462 machine.go:93] provisionDockerMachine start ...
	I0916 11:12:59.086274 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:12:59.107587 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:59.107855 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34758 <nil> <nil>}
	I0916 11:12:59.107865 1501462 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:12:59.108555 1501462 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:13:02.252983 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
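Note: the "ssh: handshake failed: EOF" at 11:12:59 followed by a clean reply at 11:13:02 is the expected retry-until-sshd-is-ready pattern after `docker start`: the forwarded port accepts connections before sshd inside the container has finished booting, so libmachine retries. A minimal sketch of that wait loop (illustrative, not minikube's actual code; port 34758 is the forwarded SSH port from the inspect output above):

	package main

	import (
		"fmt"
		"log"
		"net"
		"time"
	)

	func main() {
		deadline := time.Now().Add(30 * time.Second)
		for {
			// Probe the forwarded SSH port; a real client would also retry
			// the SSH handshake itself, as libmachine does in this log.
			conn, err := net.DialTimeout("tcp", "127.0.0.1:34758", time.Second)
			if err == nil {
				conn.Close()
				fmt.Println("port reachable")
				return
			}
			if time.Now().After(deadline) {
				log.Fatalf("gave up waiting: %v", err)
			}
			time.Sleep(500 * time.Millisecond)
		}
	}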
	I0916 11:13:02.253013 1501462 ubuntu.go:169] provisioning hostname "multinode-654612"
	I0916 11:13:02.253095 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:02.271056 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:02.271349 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34758 <nil> <nil>}
	I0916 11:13:02.271366 1501462 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612 && echo "multinode-654612" | sudo tee /etc/hostname
	I0916 11:13:02.426393 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:13:02.426477 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:02.444857 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:02.445122 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34758 <nil> <nil>}
	I0916 11:13:02.445153 1501462 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:13:02.580893 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:13:02.580918 1501462 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:13:02.580938 1501462 ubuntu.go:177] setting up certificates
	I0916 11:13:02.580949 1501462 provision.go:84] configureAuth start
	I0916 11:13:02.581022 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:13:02.597512 1501462 provision.go:143] copyHostCerts
	I0916 11:13:02.597558 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:13:02.597591 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:13:02.597604 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:13:02.597681 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:13:02.597782 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:13:02.597805 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:13:02.597810 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:13:02.597842 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:13:02.597899 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:13:02.597924 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:13:02.597933 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:13:02.597960 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:13:02.598022 1501462 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612 san=[127.0.0.1 192.168.67.2 localhost minikube multinode-654612]
	I0916 11:13:03.498435 1501462 provision.go:177] copyRemoteCerts
	I0916 11:13:03.498511 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:13:03.498556 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:03.514865 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34758 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:13:03.613366 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:13:03.613439 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:13:03.637927 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:13:03.638016 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:13:03.662023 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:13:03.662144 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:13:03.686756 1501462 provision.go:87] duration metric: took 1.105782068s to configureAuth
	I0916 11:13:03.686837 1501462 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:13:03.687093 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:13:03.687213 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:03.704068 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:03.704322 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34758 <nil> <nil>}
	I0916 11:13:03.704345 1501462 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:13:04.079035 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:13:04.079059 1501462 machine.go:96] duration metric: took 4.992878401s to provisionDockerMachine
	I0916 11:13:04.079076 1501462 start.go:293] postStartSetup for "multinode-654612" (driver="docker")
	I0916 11:13:04.079088 1501462 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:13:04.079159 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:13:04.079208 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:04.103509 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34758 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:13:04.201772 1501462 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:13:04.204912 1501462 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:13:04.204932 1501462 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:13:04.204939 1501462 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:13:04.204945 1501462 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:13:04.204950 1501462 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:13:04.204953 1501462 command_runner.go:130] > ID=ubuntu
	I0916 11:13:04.204956 1501462 command_runner.go:130] > ID_LIKE=debian
	I0916 11:13:04.204961 1501462 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:13:04.204965 1501462 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:13:04.204982 1501462 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:13:04.204993 1501462 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:13:04.205004 1501462 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:13:04.205299 1501462 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:13:04.205331 1501462 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:13:04.205341 1501462 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:13:04.205352 1501462 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:13:04.205362 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:13:04.205424 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:13:04.205507 1501462 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:13:04.205520 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:13:04.205621 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:13:04.214168 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:13:04.238686 1501462 start.go:296] duration metric: took 159.593006ms for postStartSetup
	I0916 11:13:04.238849 1501462 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:13:04.238919 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:04.255701 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34758 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:13:04.349598 1501462 command_runner.go:130] > 13%
	I0916 11:13:04.349674 1501462 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:13:04.353748 1501462 command_runner.go:130] > 170G
	I0916 11:13:04.354152 1501462 fix.go:56] duration metric: took 5.65386703s for fixHost
	I0916 11:13:04.354177 1501462 start.go:83] releasing machines lock for "multinode-654612", held for 5.653917975s
	I0916 11:13:04.354248 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:13:04.371072 1501462 ssh_runner.go:195] Run: cat /version.json
	I0916 11:13:04.371135 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:04.371398 1501462 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:13:04.371496 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:13:04.394209 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34758 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:13:04.398179 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34758 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:13:04.612467 1501462 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:13:04.612521 1501462 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:13:04.612649 1501462 ssh_runner.go:195] Run: systemctl --version
	I0916 11:13:04.616641 1501462 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:13:04.616710 1501462 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:13:04.617127 1501462 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:13:04.761870 1501462 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:13:04.766165 1501462 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 11:13:04.766193 1501462 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:13:04.766202 1501462 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 11:13:04.766209 1501462 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:13:04.766214 1501462 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:13:04.766220 1501462 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:13:04.766224 1501462 command_runner.go:130] > Change: 2024-09-16 11:09:48.923228029 +0000
	I0916 11:13:04.766229 1501462 command_runner.go:130] >  Birth: 2024-09-16 11:09:48.919228126 +0000
	I0916 11:13:04.766450 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:13:04.776010 1501462 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:13:04.776109 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:13:04.785189 1501462 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:13:04.785214 1501462 start.go:495] detecting cgroup driver to use...
	I0916 11:13:04.785248 1501462 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:13:04.785299 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:13:04.797673 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:13:04.809218 1501462 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:13:04.809332 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:13:04.822810 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:13:04.834876 1501462 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:13:04.931527 1501462 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:13:05.020319 1501462 docker.go:233] disabling docker service ...
	I0916 11:13:05.020397 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:13:05.034616 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:13:05.047086 1501462 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:13:05.141412 1501462 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:13:05.224426 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:13:05.236660 1501462 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:13:05.253713 1501462 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
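For reference, the file the tee pipeline above produces is a single key; crictl reads it instead of probing for sockets, and the image endpoint falls back to the runtime endpoint when unset:

	# /etc/crictl.yaml as written above
	runtime-endpoint: unix:///var/run/crio/crio.sock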
	I0916 11:13:05.253747 1501462 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:13:05.253808 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.263763 1501462 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:13:05.263841 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.274466 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.285247 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.295854 1501462 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:13:05.305433 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.315936 1501462 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:13:05.325772 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
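Taken together, the sed edits above leave the drop-in with roughly the following keys (grouped here under their canonical crio.conf(5) sections for orientation; the actual 02-crio.conf is flat text edited in place). The cgroup_manager, conmon_cgroup, and default_sysctls values are confirmed by the crio config dump later in this log:

	# /etc/crio/crio.conf.d/02-crio.conf, relevant keys after the edits
	[crio.image]
	pause_image = "registry.k8s.io/pause:3.10"
	[crio.runtime]
	cgroup_manager = "cgroupfs"
	conmon_cgroup = "pod"
	default_sysctls = [
	  "net.ipv4.ip_unprivileged_port_start=0",
	]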
	I0916 11:13:05.337010 1501462 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:13:05.344895 1501462 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:13:05.346123 1501462 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
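Both kernel settings touched above are Kubernetes networking prerequisites: bridge-nf-call-iptables makes bridged pod traffic visible to iptables, and ip_forward lets the node route pod-to-pod packets. The echo is runtime-only; a persistent equivalent (the standard sysctl.d mechanism, not something this run does) would be:

	# /etc/sysctl.d/99-kubernetes.conf (illustrative)
	net.bridge.bridge-nf-call-iptables = 1
	net.ipv4.ip_forward = 1
	# apply without a reboot:
	sudo sysctl --system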
	I0916 11:13:05.355306 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:13:05.447913 1501462 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:13:05.569026 1501462 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:13:05.569197 1501462 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:13:05.573303 1501462 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:13:05.573370 1501462 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:13:05.573392 1501462 command_runner.go:130] > Device: 43h/67d	Inode: 207         Links: 1
	I0916 11:13:05.573421 1501462 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:13:05.573452 1501462 command_runner.go:130] > Access: 2024-09-16 11:13:05.554432673 +0000
	I0916 11:13:05.573478 1501462 command_runner.go:130] > Modify: 2024-09-16 11:13:05.554432673 +0000
	I0916 11:13:05.573498 1501462 command_runner.go:130] > Change: 2024-09-16 11:13:05.554432673 +0000
	I0916 11:13:05.573517 1501462 command_runner.go:130] >  Birth: -
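start.go:542 announced a 60-second wait for the socket; here the first stat already finds it, roughly 15ms after the restart returned. A minimal shell equivalent of that wait loop (same socket path assumed):

	# Poll up to 60s for the CRI-O socket to appear after restart.
	i=0
	while [ "$i" -lt 60 ] && [ ! -S /var/run/crio/crio.sock ]; do
	  sleep 1; i=$((i + 1))
	done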
	I0916 11:13:05.573579 1501462 start.go:563] Will wait 60s for crictl version
	I0916 11:13:05.573651 1501462 ssh_runner.go:195] Run: which crictl
	I0916 11:13:05.577570 1501462 command_runner.go:130] > /usr/bin/crictl
	I0916 11:13:05.577762 1501462 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:13:05.621198 1501462 command_runner.go:130] > Version:  0.1.0
	I0916 11:13:05.621324 1501462 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:13:05.621374 1501462 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:13:05.621429 1501462 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:13:05.624157 1501462 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:13:05.624274 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:13:05.663127 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:13:05.663165 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:13:05.663173 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:13:05.663178 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:13:05.663184 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:13:05.663198 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:13:05.663203 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:13:05.663210 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:13:05.663215 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:13:05.663223 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:13:05.663230 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:13:05.663233 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:13:05.665387 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:13:05.701114 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:13:05.701194 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:13:05.701217 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:13:05.701234 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:13:05.701264 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:13:05.701283 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:13:05.701321 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:13:05.701355 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:13:05.701375 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:13:05.701397 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:13:05.701416 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:13:05.701448 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:13:05.707843 1501462 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:13:05.710685 1501462 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:13:05.726962 1501462 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:13:05.730630 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
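The bash one-liner above is the usual idempotent /etc/hosts update: filter out any stale host.minikube.internal line, append the current gateway mapping (192.168.67.1, probed by the grep just before), and install the result over the original. Expanded for readability, same commands:

	{ grep -v $'\thost.minikube.internal$' /etc/hosts
	  printf '192.168.67.1\thost.minikube.internal\n'
	} > /tmp/h.$$
	sudo cp /tmp/h.$$ /etc/hosts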
	I0916 11:13:05.741497 1501462 kubeadm.go:883] updating cluster {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true} {Name:m03 IP:192.168.67.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:13:05.741663 1501462 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:13:05.741727 1501462 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:13:05.784867 1501462 command_runner.go:130] > {
	I0916 11:13:05.784888 1501462 command_runner.go:130] >   "images": [
	I0916 11:13:05.784893 1501462 command_runner.go:130] >     {
	I0916 11:13:05.784902 1501462 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:13:05.784907 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.784914 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:13:05.784917 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.784921 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.784930 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:13:05.784938 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:13:05.784941 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.784945 1501462 command_runner.go:130] >       "size": "90295858",
	I0916 11:13:05.784948 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.784952 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.784961 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.784965 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.784969 1501462 command_runner.go:130] >     },
	I0916 11:13:05.784972 1501462 command_runner.go:130] >     {
	I0916 11:13:05.784979 1501462 command_runner.go:130] >       "id": "89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:13:05.784982 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.784987 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:13:05.784990 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.784994 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785002 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3",
	I0916 11:13:05.785010 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:13:05.785013 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785019 1501462 command_runner.go:130] >       "size": "1496796",
	I0916 11:13:05.785028 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.785034 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785037 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785041 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785044 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785047 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785053 1501462 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:13:05.785056 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785061 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:13:05.785065 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785068 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785077 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:13:05.785085 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:13:05.785088 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785092 1501462 command_runner.go:130] >       "size": "29037500",
	I0916 11:13:05.785096 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.785099 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785103 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785106 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785111 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785114 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785120 1501462 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:13:05.785124 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785128 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:13:05.785131 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785135 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785143 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:13:05.785155 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:13:05.785158 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785162 1501462 command_runner.go:130] >       "size": "61647114",
	I0916 11:13:05.785165 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.785169 1501462 command_runner.go:130] >       "username": "nonroot",
	I0916 11:13:05.785172 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785180 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785183 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785185 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785192 1501462 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:13:05.785195 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785200 1501462 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:13:05.785203 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785207 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785214 1501462 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:13:05.785221 1501462 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:13:05.785224 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785228 1501462 command_runner.go:130] >       "size": "139912446",
	I0916 11:13:05.785231 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.785235 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.785238 1501462 command_runner.go:130] >       },
	I0916 11:13:05.785243 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785247 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785250 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785253 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785256 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785262 1501462 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:13:05.785266 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785271 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:13:05.785273 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785277 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785285 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:13:05.785293 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:13:05.785296 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785300 1501462 command_runner.go:130] >       "size": "92632544",
	I0916 11:13:05.785303 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.785307 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.785311 1501462 command_runner.go:130] >       },
	I0916 11:13:05.785315 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785319 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785323 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785326 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785329 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785335 1501462 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:13:05.785339 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785345 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:13:05.785348 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785351 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785360 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:13:05.785368 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:13:05.785371 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785375 1501462 command_runner.go:130] >       "size": "86930758",
	I0916 11:13:05.785378 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.785382 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.785385 1501462 command_runner.go:130] >       },
	I0916 11:13:05.785388 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785392 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785396 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785399 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785402 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785408 1501462 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:13:05.785412 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785416 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:13:05.785419 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785423 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785436 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:13:05.785443 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:13:05.785447 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785450 1501462 command_runner.go:130] >       "size": "95951255",
	I0916 11:13:05.785454 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.785458 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785461 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785466 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785469 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785472 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785478 1501462 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:13:05.785481 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785486 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:13:05.785489 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785493 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785501 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:13:05.785508 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:13:05.785513 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785517 1501462 command_runner.go:130] >       "size": "67007814",
	I0916 11:13:05.785520 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.785523 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.785526 1501462 command_runner.go:130] >       },
	I0916 11:13:05.785530 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785533 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785537 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785540 1501462 command_runner.go:130] >     },
	I0916 11:13:05.785543 1501462 command_runner.go:130] >     {
	I0916 11:13:05.785549 1501462 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:13:05.785552 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.785557 1501462 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:13:05.785560 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785563 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.785571 1501462 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:13:05.785578 1501462 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:13:05.785581 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.785585 1501462 command_runner.go:130] >       "size": "519877",
	I0916 11:13:05.785589 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.785592 1501462 command_runner.go:130] >         "value": "65535"
	I0916 11:13:05.785595 1501462 command_runner.go:130] >       },
	I0916 11:13:05.785599 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.785604 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.785607 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.785610 1501462 command_runner.go:130] >     }
	I0916 11:13:05.785613 1501462 command_runner.go:130] >   ]
	I0916 11:13:05.785615 1501462 command_runner.go:130] > }
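crio.go:514 below concludes from this inventory that every image required for Kubernetes v1.31.1 on crio is already present, so the preload tarball is not extracted. To eyeball the same list by hand, one option (assumes jq is available on the node):

	sudo crictl images --output json | jq -r '.images[].repoTags[]'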
	I0916 11:13:05.788512 1501462 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:13:05.788534 1501462 crio.go:433] Images already preloaded, skipping extraction
	I0916 11:13:05.788591 1501462 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:13:05.826775 1501462 command_runner.go:130] > {
	I0916 11:13:05.826795 1501462 command_runner.go:130] >   "images": [
	I0916 11:13:05.826799 1501462 command_runner.go:130] >     {
	I0916 11:13:05.826807 1501462 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:13:05.826812 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.826818 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:13:05.826821 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826825 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.826835 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:13:05.826842 1501462 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:13:05.826845 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826859 1501462 command_runner.go:130] >       "size": "90295858",
	I0916 11:13:05.826863 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.826867 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.826872 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.826876 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.826879 1501462 command_runner.go:130] >     },
	I0916 11:13:05.826882 1501462 command_runner.go:130] >     {
	I0916 11:13:05.826888 1501462 command_runner.go:130] >       "id": "89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:13:05.826894 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.826899 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:13:05.826902 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826907 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.826915 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3",
	I0916 11:13:05.826923 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:13:05.826926 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826929 1501462 command_runner.go:130] >       "size": "1496796",
	I0916 11:13:05.826933 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.826938 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.826942 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.826945 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.826949 1501462 command_runner.go:130] >     },
	I0916 11:13:05.826952 1501462 command_runner.go:130] >     {
	I0916 11:13:05.826958 1501462 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:13:05.826962 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.826967 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:13:05.826970 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826974 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.826982 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:13:05.826990 1501462 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:13:05.826993 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.826997 1501462 command_runner.go:130] >       "size": "29037500",
	I0916 11:13:05.827000 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.827003 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827008 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827011 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827016 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827019 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827025 1501462 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:13:05.827028 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827033 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:13:05.827036 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827040 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827047 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:13:05.827058 1501462 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:13:05.827061 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827065 1501462 command_runner.go:130] >       "size": "61647114",
	I0916 11:13:05.827069 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.827072 1501462 command_runner.go:130] >       "username": "nonroot",
	I0916 11:13:05.827076 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827079 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827082 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827085 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827092 1501462 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:13:05.827096 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827101 1501462 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:13:05.827104 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827107 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827115 1501462 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:13:05.827122 1501462 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:13:05.827125 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827128 1501462 command_runner.go:130] >       "size": "139912446",
	I0916 11:13:05.827132 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.827136 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.827138 1501462 command_runner.go:130] >       },
	I0916 11:13:05.827142 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827145 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827149 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827152 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827155 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827162 1501462 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:13:05.827167 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827172 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:13:05.827175 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827179 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827186 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:13:05.827194 1501462 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:13:05.827197 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827201 1501462 command_runner.go:130] >       "size": "92632544",
	I0916 11:13:05.827205 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.827208 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.827212 1501462 command_runner.go:130] >       },
	I0916 11:13:05.827215 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827219 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827222 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827225 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827228 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827235 1501462 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:13:05.827238 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827243 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:13:05.827246 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827250 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827258 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:13:05.827266 1501462 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:13:05.827269 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827273 1501462 command_runner.go:130] >       "size": "86930758",
	I0916 11:13:05.827276 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.827280 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.827283 1501462 command_runner.go:130] >       },
	I0916 11:13:05.827288 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827292 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827296 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827298 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827303 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827309 1501462 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:13:05.827313 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827317 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:13:05.827320 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827324 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827336 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:13:05.827344 1501462 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:13:05.827347 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827351 1501462 command_runner.go:130] >       "size": "95951255",
	I0916 11:13:05.827354 1501462 command_runner.go:130] >       "uid": null,
	I0916 11:13:05.827358 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827362 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827365 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827368 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827372 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827378 1501462 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:13:05.827381 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827386 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:13:05.827389 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827393 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827401 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:13:05.827409 1501462 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:13:05.827412 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827416 1501462 command_runner.go:130] >       "size": "67007814",
	I0916 11:13:05.827420 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.827423 1501462 command_runner.go:130] >         "value": "0"
	I0916 11:13:05.827426 1501462 command_runner.go:130] >       },
	I0916 11:13:05.827429 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827433 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827437 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827440 1501462 command_runner.go:130] >     },
	I0916 11:13:05.827442 1501462 command_runner.go:130] >     {
	I0916 11:13:05.827449 1501462 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:13:05.827452 1501462 command_runner.go:130] >       "repoTags": [
	I0916 11:13:05.827456 1501462 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:13:05.827459 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827463 1501462 command_runner.go:130] >       "repoDigests": [
	I0916 11:13:05.827470 1501462 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:13:05.827477 1501462 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:13:05.827480 1501462 command_runner.go:130] >       ],
	I0916 11:13:05.827484 1501462 command_runner.go:130] >       "size": "519877",
	I0916 11:13:05.827489 1501462 command_runner.go:130] >       "uid": {
	I0916 11:13:05.827493 1501462 command_runner.go:130] >         "value": "65535"
	I0916 11:13:05.827495 1501462 command_runner.go:130] >       },
	I0916 11:13:05.827499 1501462 command_runner.go:130] >       "username": "",
	I0916 11:13:05.827502 1501462 command_runner.go:130] >       "spec": null,
	I0916 11:13:05.827506 1501462 command_runner.go:130] >       "pinned": false
	I0916 11:13:05.827509 1501462 command_runner.go:130] >     }
	I0916 11:13:05.827512 1501462 command_runner.go:130] >   ]
	I0916 11:13:05.827514 1501462 command_runner.go:130] > }
	I0916 11:13:05.830698 1501462 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:13:05.830720 1501462 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:13:05.830728 1501462 kubeadm.go:934] updating node { 192.168.67.2 8443 v1.31.1 crio true true} ...
	I0916 11:13:05.830834 1501462 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
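The unit text logged above is the kubelet systemd drop-in minikube writes; the empty ExecStart= line is the standard systemd idiom for clearing the packaged command before overriding it. Reassembled as a file (the path is an assumption; minikube conventionally installs the drop-in under /etc/systemd/system/kubelet.service.d/):

	# 10-kubeadm.conf (assumed path: /etc/systemd/system/kubelet.service.d/)
	[Unit]
	Wants=crio.service

	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2

	[Install]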
	I0916 11:13:05.830929 1501462 ssh_runner.go:195] Run: crio config
	I0916 11:13:05.879293 1501462 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 11:13:05.879320 1501462 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 11:13:05.879330 1501462 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 11:13:05.879334 1501462 command_runner.go:130] > #
	I0916 11:13:05.879342 1501462 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 11:13:05.879349 1501462 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 11:13:05.879355 1501462 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 11:13:05.879362 1501462 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 11:13:05.879365 1501462 command_runner.go:130] > # reload'.
	I0916 11:13:05.879372 1501462 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 11:13:05.879379 1501462 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 11:13:05.879385 1501462 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 11:13:05.879391 1501462 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 11:13:05.879394 1501462 command_runner.go:130] > [crio]
	I0916 11:13:05.879400 1501462 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 11:13:05.879408 1501462 command_runner.go:130] > # containers images, in this directory.
	I0916 11:13:05.879644 1501462 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 11:13:05.879656 1501462 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 11:13:05.879662 1501462 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 11:13:05.879671 1501462 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 11:13:05.879678 1501462 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 11:13:05.879682 1501462 command_runner.go:130] > # storage_driver = "vfs"
	I0916 11:13:05.879687 1501462 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 11:13:05.879696 1501462 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 11:13:05.879699 1501462 command_runner.go:130] > # storage_option = [
	I0916 11:13:05.879702 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.879709 1501462 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 11:13:05.879715 1501462 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 11:13:05.879958 1501462 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 11:13:05.879987 1501462 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 11:13:05.879994 1501462 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 11:13:05.879998 1501462 command_runner.go:130] > # always happen on a node reboot
	I0916 11:13:05.880002 1501462 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 11:13:05.880008 1501462 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 11:13:05.880013 1501462 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 11:13:05.880027 1501462 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 11:13:05.880167 1501462 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 11:13:05.880181 1501462 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 11:13:05.880190 1501462 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 11:13:05.880194 1501462 command_runner.go:130] > # internal_wipe = true
	I0916 11:13:05.880199 1501462 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 11:13:05.880205 1501462 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 11:13:05.880211 1501462 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 11:13:05.880360 1501462 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 11:13:05.880370 1501462 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 11:13:05.880373 1501462 command_runner.go:130] > [crio.api]
	I0916 11:13:05.880385 1501462 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 11:13:05.880390 1501462 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 11:13:05.880396 1501462 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 11:13:05.880525 1501462 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 11:13:05.880536 1501462 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 11:13:05.880541 1501462 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 11:13:05.880545 1501462 command_runner.go:130] > # stream_port = "0"
	I0916 11:13:05.880550 1501462 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 11:13:05.880735 1501462 command_runner.go:130] > # stream_enable_tls = false
	I0916 11:13:05.880745 1501462 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 11:13:05.880750 1501462 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 11:13:05.880765 1501462 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 11:13:05.880771 1501462 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 11:13:05.880774 1501462 command_runner.go:130] > # minutes.
	I0916 11:13:05.880911 1501462 command_runner.go:130] > # stream_tls_cert = ""
	I0916 11:13:05.880921 1501462 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 11:13:05.880928 1501462 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 11:13:05.881083 1501462 command_runner.go:130] > # stream_tls_key = ""
	I0916 11:13:05.881100 1501462 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 11:13:05.881108 1501462 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 11:13:05.881113 1501462 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 11:13:05.881117 1501462 command_runner.go:130] > # stream_tls_ca = ""
	I0916 11:13:05.881125 1501462 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:13:05.881315 1501462 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 11:13:05.881329 1501462 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:13:05.881333 1501462 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
	I0916 11:13:05.881347 1501462 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 11:13:05.881387 1501462 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 11:13:05.881405 1501462 command_runner.go:130] > [crio.runtime]
	I0916 11:13:05.881425 1501462 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 11:13:05.881458 1501462 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 11:13:05.881479 1501462 command_runner.go:130] > # "nofile=1024:2048"
	I0916 11:13:05.881501 1501462 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 11:13:05.881519 1501462 command_runner.go:130] > # default_ulimits = [
	I0916 11:13:05.881793 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.881803 1501462 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 11:13:05.881807 1501462 command_runner.go:130] > # no_pivot = false
	I0916 11:13:05.881815 1501462 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 11:13:05.881851 1501462 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 11:13:05.881859 1501462 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 11:13:05.881865 1501462 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 11:13:05.881870 1501462 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 11:13:05.881883 1501462 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:13:05.881887 1501462 command_runner.go:130] > # conmon = ""
	I0916 11:13:05.881892 1501462 command_runner.go:130] > # Cgroup setting for conmon
	I0916 11:13:05.881926 1501462 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 11:13:05.881933 1501462 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 11:13:05.881939 1501462 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 11:13:05.881944 1501462 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 11:13:05.881954 1501462 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:13:05.882146 1501462 command_runner.go:130] > # conmon_env = [
	I0916 11:13:05.882188 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.882209 1501462 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 11:13:05.882226 1501462 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 11:13:05.882260 1501462 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 11:13:05.882280 1501462 command_runner.go:130] > # default_env = [
	I0916 11:13:05.882296 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.882316 1501462 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 11:13:05.882346 1501462 command_runner.go:130] > # selinux = false
	I0916 11:13:05.882371 1501462 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 11:13:05.882393 1501462 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 11:13:05.882428 1501462 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 11:13:05.882627 1501462 command_runner.go:130] > # seccomp_profile = ""
	I0916 11:13:05.882638 1501462 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 11:13:05.882644 1501462 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 11:13:05.882651 1501462 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 11:13:05.882685 1501462 command_runner.go:130] > # which might increase security.
	I0916 11:13:05.882693 1501462 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 11:13:05.882700 1501462 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 11:13:05.882706 1501462 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 11:13:05.882712 1501462 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 11:13:05.882718 1501462 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 11:13:05.882723 1501462 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:13:05.882727 1501462 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 11:13:05.882758 1501462 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 11:13:05.882767 1501462 command_runner.go:130] > # the cgroup blockio controller.
	I0916 11:13:05.882771 1501462 command_runner.go:130] > # blockio_config_file = ""
	I0916 11:13:05.882778 1501462 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 11:13:05.882782 1501462 command_runner.go:130] > # irqbalance daemon.
	I0916 11:13:05.882787 1501462 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 11:13:05.882794 1501462 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 11:13:05.882799 1501462 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:13:05.882926 1501462 command_runner.go:130] > # rdt_config_file = ""
	I0916 11:13:05.882955 1501462 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 11:13:05.882984 1501462 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 11:13:05.883003 1501462 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 11:13:05.883033 1501462 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 11:13:05.883056 1501462 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 11:13:05.883078 1501462 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 11:13:05.883096 1501462 command_runner.go:130] > # will be added.
	I0916 11:13:05.883128 1501462 command_runner.go:130] > # default_capabilities = [
	I0916 11:13:05.883310 1501462 command_runner.go:130] > # 	"CHOWN",
	I0916 11:13:05.883329 1501462 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 11:13:05.883333 1501462 command_runner.go:130] > # 	"FSETID",
	I0916 11:13:05.883336 1501462 command_runner.go:130] > # 	"FOWNER",
	I0916 11:13:05.883340 1501462 command_runner.go:130] > # 	"SETGID",
	I0916 11:13:05.883343 1501462 command_runner.go:130] > # 	"SETUID",
	I0916 11:13:05.883346 1501462 command_runner.go:130] > # 	"SETPCAP",
	I0916 11:13:05.883501 1501462 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 11:13:05.883527 1501462 command_runner.go:130] > # 	"KILL",
	I0916 11:13:05.883542 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.883583 1501462 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 11:13:05.883607 1501462 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 11:13:05.883788 1501462 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 11:13:05.883843 1501462 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 11:13:05.883865 1501462 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:13:05.883880 1501462 command_runner.go:130] > default_sysctls = [
	I0916 11:13:05.883912 1501462 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 11:13:05.883934 1501462 command_runner.go:130] > ]
	I0916 11:13:05.883963 1501462 command_runner.go:130] > # List of devices on the host that a
	I0916 11:13:05.884012 1501462 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 11:13:05.884146 1501462 command_runner.go:130] > # allowed_devices = [
	I0916 11:13:05.884254 1501462 command_runner.go:130] > # 	"/dev/fuse",
	I0916 11:13:05.884403 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.884613 1501462 command_runner.go:130] > # List of additional devices, specified as
	I0916 11:13:05.884789 1501462 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 11:13:05.884939 1501462 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 11:13:05.885064 1501462 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:13:05.885164 1501462 command_runner.go:130] > # additional_devices = [
	I0916 11:13:05.885286 1501462 command_runner.go:130] > # ]
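	As a minimal sketch of the "<device-on-host>:<device-on-container>:<permissions>" format described above (parseDevice is a hypothetical helper for illustration, not CRI-O's actual parser):

	package main

	import (
		"fmt"
		"strings"
	)

	// parseDevice splits an additional_devices entry into its three fields.
	func parseDevice(spec string) (host, ctr, perms string, err error) {
		parts := strings.Split(spec, ":")
		if len(parts) != 3 {
			return "", "", "", fmt.Errorf("device spec %q: want <host>:<container>:<permissions>", spec)
		}
		return parts[0], parts[1], parts[2], nil
	}

	func main() {
		host, ctr, perms, _ := parseDevice("/dev/sdc:/dev/xvdc:rwm")
		fmt.Println(host, ctr, perms) // /dev/sdc /dev/xvdc rwm
	}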
	I0916 11:13:05.885366 1501462 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 11:13:05.885490 1501462 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 11:13:05.885570 1501462 command_runner.go:130] > # 	"/etc/cdi",
	I0916 11:13:05.885634 1501462 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 11:13:05.885735 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.885808 1501462 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 11:13:05.885863 1501462 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking the host's uid/gid.
	I0916 11:13:05.885993 1501462 command_runner.go:130] > # Defaults to false.
	I0916 11:13:05.886173 1501462 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 11:13:05.886321 1501462 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 11:13:05.886509 1501462 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 11:13:05.886803 1501462 command_runner.go:130] > # hooks_dir = [
	I0916 11:13:05.886939 1501462 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 11:13:05.887065 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.887269 1501462 command_runner.go:130] > # Path to the file specifying the defaults mounts for each container. The
	I0916 11:13:05.887453 1501462 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 11:13:05.887596 1501462 command_runner.go:130] > # its default mounts from the following two files:
	I0916 11:13:05.887717 1501462 command_runner.go:130] > #
	I0916 11:13:05.887849 1501462 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 11:13:05.887968 1501462 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 11:13:05.888127 1501462 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 11:13:05.888366 1501462 command_runner.go:130] > #
	I0916 11:13:05.888527 1501462 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 11:13:05.888738 1501462 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 11:13:05.888841 1501462 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 11:13:05.888952 1501462 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 11:13:05.889104 1501462 command_runner.go:130] > #
	I0916 11:13:05.889413 1501462 command_runner.go:130] > # default_mounts_file = ""
	I0916 11:13:05.889504 1501462 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 11:13:05.889527 1501462 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 11:13:05.889557 1501462 command_runner.go:130] > # pids_limit = 0
	I0916 11:13:05.889584 1501462 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 11:13:05.889606 1501462 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 11:13:05.892054 1501462 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 11:13:05.892099 1501462 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 11:13:05.892129 1501462 command_runner.go:130] > # log_size_max = -1
	I0916 11:13:05.892152 1501462 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 11:13:05.892169 1501462 command_runner.go:130] > # log_to_journald = false
	I0916 11:13:05.892187 1501462 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 11:13:05.892217 1501462 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 11:13:05.892238 1501462 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 11:13:05.892258 1501462 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 11:13:05.892277 1501462 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 11:13:05.892306 1501462 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 11:13:05.892331 1501462 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 11:13:05.892349 1501462 command_runner.go:130] > # read_only = false
	I0916 11:13:05.892370 1501462 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 11:13:05.892406 1501462 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 11:13:05.892424 1501462 command_runner.go:130] > # live configuration reload.
	I0916 11:13:05.892465 1501462 command_runner.go:130] > # log_level = "info"
	I0916 11:13:05.892489 1501462 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 11:13:05.892515 1501462 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:13:05.892546 1501462 command_runner.go:130] > # log_filter = ""
	I0916 11:13:05.892578 1501462 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 11:13:05.892599 1501462 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 11:13:05.892629 1501462 command_runner.go:130] > # separated by comma.
	I0916 11:13:05.892650 1501462 command_runner.go:130] > # uid_mappings = ""
	I0916 11:13:05.892694 1501462 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 11:13:05.892719 1501462 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 11:13:05.892737 1501462 command_runner.go:130] > # separated by comma.
	I0916 11:13:05.892754 1501462 command_runner.go:130] > # gid_mappings = ""
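	The containerUID:HostUID:Size format above can be illustrated with a short Go sketch (parseIDMappings is hypothetical and validates nothing beyond the field count):

	package main

	import (
		"fmt"
		"strconv"
		"strings"
	)

	type idMap struct{ ContainerID, HostID, Size uint32 }

	// parseIDMappings parses comma-separated containerID:hostID:size ranges.
	func parseIDMappings(s string) ([]idMap, error) {
		var out []idMap
		for _, r := range strings.Split(s, ",") {
			f := strings.Split(r, ":")
			if len(f) != 3 {
				return nil, fmt.Errorf("range %q: want containerID:hostID:size", r)
			}
			var v [3]uint32
			for i, field := range f {
				n, err := strconv.ParseUint(field, 10, 32)
				if err != nil {
					return nil, fmt.Errorf("range %q: %v", r, err)
				}
				v[i] = uint32(n)
			}
			out = append(out, idMap{v[0], v[1], v[2]})
		}
		return out, nil
	}

	func main() {
		fmt.Println(parseIDMappings("0:100000:65536")) // [{0 100000 65536}] <nil>
	}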
	I0916 11:13:05.892784 1501462 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 11:13:05.892809 1501462 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:13:05.892830 1501462 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:13:05.892861 1501462 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 11:13:05.892884 1501462 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 11:13:05.892902 1501462 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:13:05.892922 1501462 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:13:05.892957 1501462 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 11:13:05.892976 1501462 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 11:13:05.892997 1501462 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 11:13:05.893026 1501462 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 11:13:05.893046 1501462 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 11:13:05.893066 1501462 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 11:13:05.893085 1501462 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 11:13:05.893114 1501462 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 11:13:05.893136 1501462 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 11:13:05.893153 1501462 command_runner.go:130] > # drop_infra_ctr = true
	I0916 11:13:05.893174 1501462 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 11:13:05.893203 1501462 command_runner.go:130] > # You can use Linux CPU list format to specify desired CPUs.
	I0916 11:13:05.893229 1501462 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 11:13:05.893248 1501462 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 11:13:05.893280 1501462 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 11:13:05.893302 1501462 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 11:13:05.893340 1501462 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 11:13:05.893372 1501462 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 11:13:05.893399 1501462 command_runner.go:130] > # pinns_path = ""
	I0916 11:13:05.893418 1501462 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 11:13:05.893453 1501462 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 11:13:05.893478 1501462 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 11:13:05.893496 1501462 command_runner.go:130] > # default_runtime = "runc"
	I0916 11:13:05.893528 1501462 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 11:13:05.893555 1501462 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior of being created as a directory).
	I0916 11:13:05.893579 1501462 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 11:13:05.893610 1501462 command_runner.go:130] > # creation as a file is not desired either.
	I0916 11:13:05.893638 1501462 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 11:13:05.893656 1501462 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 11:13:05.893687 1501462 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 11:13:05.893707 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.893726 1501462 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 11:13:05.893747 1501462 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 11:13:05.893778 1501462 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 11:13:05.893800 1501462 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 11:13:05.893816 1501462 command_runner.go:130] > #
	I0916 11:13:05.893834 1501462 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 11:13:05.893862 1501462 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 11:13:05.893884 1501462 command_runner.go:130] > #  runtime_type = "oci"
	I0916 11:13:05.893903 1501462 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 11:13:05.893922 1501462 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 11:13:05.893950 1501462 command_runner.go:130] > #  allowed_annotations = []
	I0916 11:13:05.893973 1501462 command_runner.go:130] > # Where:
	I0916 11:13:05.893992 1501462 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 11:13:05.894012 1501462 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 11:13:05.894046 1501462 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 11:13:05.894066 1501462 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 11:13:05.894084 1501462 command_runner.go:130] > #   in $PATH.
	I0916 11:13:05.894116 1501462 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 11:13:05.894135 1501462 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 11:13:05.894154 1501462 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 11:13:05.894193 1501462 command_runner.go:130] > #   state.
	I0916 11:13:05.894224 1501462 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 11:13:05.894244 1501462 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 11:13:05.894284 1501462 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 11:13:05.894310 1501462 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 11:13:05.894329 1501462 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 11:13:05.894371 1501462 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 11:13:05.894388 1501462 command_runner.go:130] > #   The currently recognized values are:
	I0916 11:13:05.894421 1501462 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 11:13:05.894451 1501462 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 11:13:05.894470 1501462 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 11:13:05.894502 1501462 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 11:13:05.894530 1501462 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 11:13:05.894550 1501462 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 11:13:05.894583 1501462 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 11:13:05.894611 1501462 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 11:13:05.894630 1501462 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 11:13:05.894660 1501462 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 11:13:05.894687 1501462 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 11:13:05.894703 1501462 command_runner.go:130] > runtime_type = "oci"
	I0916 11:13:05.894720 1501462 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 11:13:05.894759 1501462 command_runner.go:130] > runtime_config_path = ""
	I0916 11:13:05.894776 1501462 command_runner.go:130] > monitor_path = ""
	I0916 11:13:05.894794 1501462 command_runner.go:130] > monitor_cgroup = ""
	I0916 11:13:05.894825 1501462 command_runner.go:130] > monitor_exec_cgroup = ""
	I0916 11:13:05.894859 1501462 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 11:13:05.894876 1501462 command_runner.go:130] > # running containers
	I0916 11:13:05.894908 1501462 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 11:13:05.894929 1501462 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 11:13:05.894950 1501462 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 11:13:05.894982 1501462 command_runner.go:130] > # surface and mitigating the consequences of a container breakout.
	I0916 11:13:05.895008 1501462 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 11:13:05.895025 1501462 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 11:13:05.895058 1501462 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 11:13:05.895084 1501462 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 11:13:05.895101 1501462 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 11:13:05.895118 1501462 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 11:13:05.895155 1501462 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 11:13:05.895174 1501462 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 11:13:05.895194 1501462 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 11:13:05.895234 1501462 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and set of resources it supports mutating.
	I0916 11:13:05.895257 1501462 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 11:13:05.895276 1501462 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 11:13:05.895320 1501462 command_runner.go:130] > # For a container to opt into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 11:13:05.895342 1501462 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 11:13:05.895374 1501462 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 11:13:05.895401 1501462 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 11:13:05.895418 1501462 command_runner.go:130] > # Example:
	I0916 11:13:05.895436 1501462 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 11:13:05.895475 1501462 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 11:13:05.895493 1501462 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 11:13:05.895512 1501462 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 11:13:05.895550 1501462 command_runner.go:130] > # cpuset = "0-1"
	I0916 11:13:05.895567 1501462 command_runner.go:130] > # cpushares = 0
	I0916 11:13:05.895584 1501462 command_runner.go:130] > # Where:
	I0916 11:13:05.895615 1501462 command_runner.go:130] > # The workload name is workload-type.
	I0916 11:13:05.895646 1501462 command_runner.go:130] > # To select this workload, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 11:13:05.895665 1501462 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 11:13:05.895697 1501462 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 11:13:05.895721 1501462 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 11:13:05.895741 1501462 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 11:13:05.895771 1501462 command_runner.go:130] > # 
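	A rough sketch of composing the per-container override key in the $annotation_prefix.$resource/$ctrName form described above (overrideKey is hypothetical; consult the CRI-O workloads documentation for the authoritative format):

	package main

	import "fmt"

	// overrideKey builds an annotation key such as
	// "io.crio.workload-type.cpushares/my-ctr".
	func overrideKey(prefix, resource, ctrName string) string {
		return fmt.Sprintf("%s.%s/%s", prefix, resource, ctrName)
	}

	func main() {
		fmt.Println(overrideKey("io.crio.workload-type", "cpushares", "my-ctr"))
	}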
	I0916 11:13:05.895793 1501462 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 11:13:05.895810 1501462 command_runner.go:130] > #
	I0916 11:13:05.895851 1501462 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 11:13:05.895884 1501462 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 11:13:05.895909 1501462 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 11:13:05.895950 1501462 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 11:13:05.895969 1501462 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 11:13:05.895986 1501462 command_runner.go:130] > [crio.image]
	I0916 11:13:05.896006 1501462 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 11:13:05.896043 1501462 command_runner.go:130] > # default_transport = "docker://"
	I0916 11:13:05.896062 1501462 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 11:13:05.896092 1501462 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:13:05.896122 1501462 command_runner.go:130] > # global_auth_file = ""
	I0916 11:13:05.896147 1501462 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 11:13:05.896165 1501462 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:13:05.896192 1501462 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 11:13:05.896218 1501462 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 11:13:05.896237 1501462 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:13:05.896256 1501462 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:13:05.896274 1501462 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 11:13:05.896314 1501462 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 11:13:05.896333 1501462 command_runner.go:130] > # When explicitly set to "", it will fall back to the entrypoint and command
	I0916 11:13:05.896354 1501462 command_runner.go:130] > # specified in the pause image. When commented out, it will fall back to the
	I0916 11:13:05.896381 1501462 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 11:13:05.896405 1501462 command_runner.go:130] > # pause_command = "/pause"
	I0916 11:13:05.896423 1501462 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 11:13:05.896443 1501462 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 11:13:05.896463 1501462 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 11:13:05.896499 1501462 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 11:13:05.896519 1501462 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 11:13:05.896535 1501462 command_runner.go:130] > # signature_policy = ""
	I0916 11:13:05.896555 1501462 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 11:13:05.896592 1501462 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 11:13:05.896609 1501462 command_runner.go:130] > # changing them here.
	I0916 11:13:05.896628 1501462 command_runner.go:130] > # insecure_registries = [
	I0916 11:13:05.896644 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.896697 1501462 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 11:13:05.896720 1501462 command_runner.go:130] > # ignore; the last will ignore volumes entirely.
	I0916 11:13:05.896726 1501462 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 11:13:05.896733 1501462 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 11:13:05.896737 1501462 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 11:13:05.896743 1501462 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 11:13:05.896747 1501462 command_runner.go:130] > # CNI plugins.
	I0916 11:13:05.896750 1501462 command_runner.go:130] > [crio.network]
	I0916 11:13:05.896776 1501462 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 11:13:05.896782 1501462 command_runner.go:130] > # CRI-O will pick up the first one found in network_dir.
	I0916 11:13:05.896798 1501462 command_runner.go:130] > # cni_default_network = ""
	I0916 11:13:05.896811 1501462 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 11:13:05.896815 1501462 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 11:13:05.896821 1501462 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 11:13:05.896827 1501462 command_runner.go:130] > # plugin_dirs = [
	I0916 11:13:05.896831 1501462 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 11:13:05.896843 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.896849 1501462 command_runner.go:130] > # A necessary configuration for Prometheus based metrics retrieval
	I0916 11:13:05.896856 1501462 command_runner.go:130] > [crio.metrics]
	I0916 11:13:05.896861 1501462 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 11:13:05.896883 1501462 command_runner.go:130] > # enable_metrics = false
	I0916 11:13:05.896888 1501462 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 11:13:05.896903 1501462 command_runner.go:130] > # By default, all metrics are enabled.
	I0916 11:13:05.896917 1501462 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 11:13:05.896924 1501462 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 11:13:05.896934 1501462 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 11:13:05.896938 1501462 command_runner.go:130] > # metrics_collectors = [
	I0916 11:13:05.896942 1501462 command_runner.go:130] > # 	"operations",
	I0916 11:13:05.896953 1501462 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 11:13:05.896957 1501462 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 11:13:05.896961 1501462 command_runner.go:130] > # 	"operations_errors",
	I0916 11:13:05.896978 1501462 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 11:13:05.896990 1501462 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 11:13:05.896994 1501462 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 11:13:05.897015 1501462 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 11:13:05.897026 1501462 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 11:13:05.897031 1501462 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 11:13:05.897035 1501462 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 11:13:05.897040 1501462 command_runner.go:130] > # 	"containers_oom_total",
	I0916 11:13:05.897044 1501462 command_runner.go:130] > # 	"containers_oom",
	I0916 11:13:05.897050 1501462 command_runner.go:130] > # 	"processes_defunct",
	I0916 11:13:05.897054 1501462 command_runner.go:130] > # 	"operations_total",
	I0916 11:13:05.897058 1501462 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 11:13:05.897063 1501462 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 11:13:05.897069 1501462 command_runner.go:130] > # 	"operations_errors_total",
	I0916 11:13:05.897078 1501462 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 11:13:05.897084 1501462 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 11:13:05.897089 1501462 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 11:13:05.897093 1501462 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 11:13:05.897098 1501462 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 11:13:05.897102 1501462 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 11:13:05.897104 1501462 command_runner.go:130] > # ]
	I0916 11:13:05.897110 1501462 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 11:13:05.897116 1501462 command_runner.go:130] > # metrics_port = 9090
	I0916 11:13:05.897121 1501462 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 11:13:05.897125 1501462 command_runner.go:130] > # metrics_socket = ""
	I0916 11:13:05.897132 1501462 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 11:13:05.897138 1501462 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 11:13:05.897147 1501462 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 11:13:05.897152 1501462 command_runner.go:130] > # certificate on any modification event.
	I0916 11:13:05.897162 1501462 command_runner.go:130] > # metrics_cert = ""
	I0916 11:13:05.897169 1501462 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 11:13:05.897176 1501462 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 11:13:05.897180 1501462 command_runner.go:130] > # metrics_key = ""
	I0916 11:13:05.897186 1501462 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 11:13:05.897189 1501462 command_runner.go:130] > [crio.tracing]
	I0916 11:13:05.897195 1501462 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 11:13:05.897202 1501462 command_runner.go:130] > # enable_tracing = false
	I0916 11:13:05.897207 1501462 command_runner.go:130] > # Address on which the gRPC trace collector listens.
	I0916 11:13:05.897212 1501462 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 11:13:05.897225 1501462 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 11:13:05.897229 1501462 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 11:13:05.897236 1501462 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 11:13:05.897242 1501462 command_runner.go:130] > [crio.stats]
	I0916 11:13:05.897248 1501462 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 11:13:05.897254 1501462 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 11:13:05.897260 1501462 command_runner.go:130] > # stats_collection_period = 0
	I0916 11:13:05.897294 1501462 command_runner.go:130] ! time="2024-09-16 11:13:05.876839988Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 11:13:05.897312 1501462 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
	I0916 11:13:05.897411 1501462 cni.go:84] Creating CNI manager for ""
	I0916 11:13:05.897428 1501462 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:13:05.897437 1501462 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:13:05.897461 1501462 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.67.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-654612 NodeName:multinode-654612 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.67.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.67.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/k
ubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:13:05.897606 1501462 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.67.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "multinode-654612"
	  kubeletExtraArgs:
	    node-ip: 192.168.67.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.67.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:13:05.897693 1501462 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:13:05.907675 1501462 command_runner.go:130] > kubeadm
	I0916 11:13:05.907697 1501462 command_runner.go:130] > kubectl
	I0916 11:13:05.907701 1501462 command_runner.go:130] > kubelet
	I0916 11:13:05.907729 1501462 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:13:05.907787 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:13:05.916552 1501462 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (366 bytes)
	I0916 11:13:05.937004 1501462 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:13:05.956196 1501462 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2154 bytes)
	I0916 11:13:05.974764 1501462 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:13:05.978633 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
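	The shell pipeline above filters out any existing control-plane.minikube.internal entry and re-appends it with the current IP. A simplified Go equivalent (a sketch, not minikube's code; error handling reduced to panics, and writing /etc/hosts requires root):

	package main

	import (
		"os"
		"strings"
	)

	func main() {
		const host = "control-plane.minikube.internal"
		data, err := os.ReadFile("/etc/hosts")
		if err != nil {
			panic(err)
		}
		var kept []string
		for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
			// same filter as grep -v $'\tcontrol-plane.minikube.internal$'
			if strings.HasSuffix(line, "\t"+host) {
				continue
			}
			kept = append(kept, line)
		}
		kept = append(kept, "192.168.67.2\t"+host)
		if err := os.WriteFile("/etc/hosts", []byte(strings.Join(kept, "\n")+"\n"), 0644); err != nil {
			panic(err)
		}
	}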
	I0916 11:13:05.989899 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:13:06.094693 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:13:06.110425 1501462 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.2
	I0916 11:13:06.110449 1501462 certs.go:194] generating shared ca certs ...
	I0916 11:13:06.110467 1501462 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:13:06.110635 1501462 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:13:06.110709 1501462 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:13:06.110730 1501462 certs.go:256] generating profile certs ...
	I0916 11:13:06.110849 1501462 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key
	I0916 11:13:06.110983 1501462 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e
	I0916 11:13:06.111054 1501462 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key
	I0916 11:13:06.111073 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:13:06.111089 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:13:06.111117 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:13:06.111133 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:13:06.111156 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:13:06.111172 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:13:06.111194 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:13:06.111210 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:13:06.111304 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:13:06.111357 1501462 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:13:06.111371 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:13:06.111399 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:13:06.111433 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:13:06.111456 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:13:06.111527 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:13:06.111564 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:13:06.111589 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:13:06.111605 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:13:06.112653 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:13:06.146605 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:13:06.179837 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:13:06.223476 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:13:06.264035 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:13:06.298274 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:13:06.325847 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:13:06.352553 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:13:06.382727 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:13:06.409430 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:13:06.435002 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:13:06.462666 1501462 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:13:06.482649 1501462 ssh_runner.go:195] Run: openssl version
	I0916 11:13:06.488094 1501462 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:13:06.488494 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:13:06.498401 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:13:06.502053 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:13:06.502083 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:13:06.502141 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:13:06.509367 1501462 command_runner.go:130] > 51391683
	I0916 11:13:06.509442 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:13:06.518777 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:13:06.528617 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:13:06.532048 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:13:06.532077 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:13:06.532149 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:13:06.539587 1501462 command_runner.go:130] > 3ec20f2e
	I0916 11:13:06.540016 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:13:06.549124 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:13:06.558719 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:13:06.562357 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:13:06.562454 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:13:06.562515 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:13:06.569689 1501462 command_runner.go:130] > b5213941
	I0916 11:13:06.570162 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
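	Each CA cert is installed by asking openssl for its subject hash and linking it into /etc/ssl/certs/<hash>.0, as the runs above show. A simplified Go sketch of that step (it shells out to the same openssl invocation; for brevity the link points at the source PEM rather than the /etc/ssl/certs copy used in the log):

	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"strings"
	)

	func main() {
		pem := "/usr/share/ca-certificates/minikubeCA.pem"
		out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
		if err != nil {
			panic(err)
		}
		hash := strings.TrimSpace(string(out)) // e.g. b5213941
		link := fmt.Sprintf("/etc/ssl/certs/%s.0", hash)
		// equivalent of: test -L <link> || ln -fs <pem> <link>
		if _, err := os.Lstat(link); os.IsNotExist(err) {
			if err := os.Symlink(pem, link); err != nil {
				panic(err)
			}
		}
	}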
	I0916 11:13:06.579410 1501462 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:13:06.583059 1501462 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:13:06.583083 1501462 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 11:13:06.583090 1501462 command_runner.go:130] > Device: 10301h/66305d	Inode: 1308755     Links: 1
	I0916 11:13:06.583097 1501462 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:13:06.583104 1501462 command_runner.go:130] > Access: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:13:06.583109 1501462 command_runner.go:130] > Modify: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:13:06.583116 1501462 command_runner.go:130] > Change: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:13:06.583121 1501462 command_runner.go:130] >  Birth: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:13:06.583190 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:13:06.589596 1501462 command_runner.go:130] > Certificate will not expire
	I0916 11:13:06.589995 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:13:06.596849 1501462 command_runner.go:130] > Certificate will not expire
	I0916 11:13:06.597354 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:13:06.604182 1501462 command_runner.go:130] > Certificate will not expire
	I0916 11:13:06.604615 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:13:06.611293 1501462 command_runner.go:130] > Certificate will not expire
	I0916 11:13:06.611712 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:13:06.618453 1501462 command_runner.go:130] > Certificate will not expire
	I0916 11:13:06.618834 1501462 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 11:13:06.625717 1501462 command_runner.go:130] > Certificate will not expire
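	The "openssl x509 -checkend 86400" runs above succeed when a certificate is still valid 24 hours from now. The same check expressed with Go's standard library (a sketch, not minikube's code; the path is one of the certs checked above):

	package main

	import (
		"crypto/x509"
		"encoding/pem"
		"fmt"
		"os"
		"time"
	)

	func main() {
		data, err := os.ReadFile("/var/lib/minikube/certs/front-proxy-client.crt")
		if err != nil {
			panic(err)
		}
		block, _ := pem.Decode(data)
		if block == nil {
			panic("no PEM block found")
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			panic(err)
		}
		// -checkend 86400: does the cert outlive now+24h?
		if time.Now().Add(24 * time.Hour).Before(cert.NotAfter) {
			fmt.Println("Certificate will not expire")
		} else {
			fmt.Println("Certificate will expire")
		}
	}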
	I0916 11:13:06.626118 1501462 kubeadm.go:392] StartCluster: {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true} {Name:m03 IP:192.168.67.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false log
viewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath:
StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:13:06.626250 1501462 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:13:06.626353 1501462 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:13:06.666635 1501462 cri.go:89] found id: ""
	I0916 11:13:06.666706 1501462 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:13:06.676535 1501462 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 11:13:06.676560 1501462 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 11:13:06.676568 1501462 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 11:13:06.676572 1501462 command_runner.go:130] > member
	I0916 11:13:06.677857 1501462 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 11:13:06.677875 1501462 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 11:13:06.677930 1501462 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 11:13:06.687004 1501462 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 11:13:06.687488 1501462 kubeconfig.go:47] verify endpoint returned: get endpoint: "multinode-654612" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:13:06.687603 1501462 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-1378450/kubeconfig needs updating (will repair): [kubeconfig missing "multinode-654612" cluster setting kubeconfig missing "multinode-654612" context setting]
	I0916 11:13:06.687902 1501462 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:13:06.688353 1501462 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:13:06.688639 1501462 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:13:06.689194 1501462 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:13:06.689405 1501462 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 11:13:06.698565 1501462 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.67.2
	I0916 11:13:06.698712 1501462 kubeadm.go:597] duration metric: took 20.829882ms to restartPrimaryControlPlane
	I0916 11:13:06.698754 1501462 kubeadm.go:394] duration metric: took 72.641954ms to StartCluster
	I0916 11:13:06.698784 1501462 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:13:06.698876 1501462 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:13:06.699597 1501462 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:13:06.700145 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:13:06.700236 1501462 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:13:06.700770 1501462 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:13:06.704127 1501462 out.go:177] * Verifying Kubernetes components...
	I0916 11:13:06.704182 1501462 out.go:177] * Enabled addons: 
	I0916 11:13:06.706298 1501462 addons.go:510] duration metric: took 6.06281ms for enable addons: enabled=[]
	I0916 11:13:06.706393 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:13:06.911968 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:13:06.936173 1501462 node_ready.go:35] waiting up to 6m0s for node "multinode-654612" to be "Ready" ...
	I0916 11:13:06.936304 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:06.936316 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:06.936325 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:06.936329 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:06.936542 1501462 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 11:13:06.936561 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:07.437331 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:07.437358 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:07.437369 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:07.437376 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.262904 1501462 round_trippers.go:574] Response Status: 200 OK in 3825 milliseconds
	I0916 11:13:11.262925 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.262934 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.262938 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.262942 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:13:11.262945 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:13:11.262947 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.262950 1501462 round_trippers.go:580]     Audit-Id: 29dfb868-ffae-44f2-b889-cb35c1644a96
	I0916 11:13:11.270657 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"545","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6314 chars]
	I0916 11:13:11.271434 1501462 node_ready.go:49] node "multinode-654612" has status "Ready":"True"
	I0916 11:13:11.271450 1501462 node_ready.go:38] duration metric: took 4.335243972s for node "multinode-654612" to be "Ready" ...
	I0916 11:13:11.271460 1501462 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:13:11.271499 1501462 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:13:11.271510 1501462 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:13:11.271570 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:13:11.271575 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.271583 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.271587 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.329715 1501462 round_trippers.go:574] Response Status: 200 OK in 58 milliseconds
	I0916 11:13:11.329738 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.329747 1501462 round_trippers.go:580]     Audit-Id: 8fc99461-85c6-4ae7-8219-456fdb50c223
	I0916 11:13:11.329751 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.329756 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.329761 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:13:11.329764 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:13:11.329767 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.341446 1501462 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"656"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 89290 chars]
	I0916 11:13:11.347531 1501462 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.347688 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:13:11.347719 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.347744 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.347764 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.358457 1501462 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 11:13:11.358531 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.358578 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.358612 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.358631 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.358648 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.358664 1501462 round_trippers.go:580]     Audit-Id: 7815d909-9130-42c1-9efe-2a3b17da57c7
	I0916 11:13:11.358693 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.363681 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"424","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:13:11.364450 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:11.364491 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.364524 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.364545 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.381092 1501462 round_trippers.go:574] Response Status: 200 OK in 16 milliseconds
	I0916 11:13:11.381165 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.381191 1501462 round_trippers.go:580]     Audit-Id: 07fa83ff-f47c-417b-a14b-c1809f823908
	I0916 11:13:11.381212 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.381238 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.381274 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.381291 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.381319 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.381515 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"545","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6314 chars]
	I0916 11:13:11.381984 1501462 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:11.382029 1501462 pod_ready.go:82] duration metric: took 34.40751ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.382064 1501462 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.382159 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:13:11.382195 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.382220 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.382239 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.394547 1501462 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:13:11.394621 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.394644 1501462 round_trippers.go:580]     Audit-Id: 9b4cd24a-6df4-4a14-b629-86f7503271c0
	I0916 11:13:11.394664 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.394694 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.394711 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.394728 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.394746 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.395624 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"388","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6435 chars]
	I0916 11:13:11.396244 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:11.396293 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.396317 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.396342 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.445601 1501462 round_trippers.go:574] Response Status: 200 OK in 49 milliseconds
	I0916 11:13:11.445675 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.445699 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.445719 1501462 round_trippers.go:580]     Audit-Id: c37076cb-83e2-43a9-ae5d-8ef458165dd6
	I0916 11:13:11.445748 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.445764 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.445781 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.445798 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.453785 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"545","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6314 chars]
	I0916 11:13:11.454311 1501462 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:11.454350 1501462 pod_ready.go:82] duration metric: took 72.265397ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.454394 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.454486 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:13:11.454513 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.454547 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.454566 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.477305 1501462 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 11:13:11.477392 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.477423 1501462 round_trippers.go:580]     Audit-Id: 4b5768ba-9549-47b9-8b3f-606594e0b36f
	I0916 11:13:11.477456 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.477484 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.477511 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.477539 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.477565 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.490541 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"386","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8513 chars]
	I0916 11:13:11.491319 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:11.491369 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.491406 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.491425 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.503419 1501462 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 11:13:11.503493 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.503515 1501462 round_trippers.go:580]     Audit-Id: 16f1c0b0-400e-4d6a-8c37-f4c546db092a
	I0916 11:13:11.503533 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.503561 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.503582 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.503600 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.503618 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.507635 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"545","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6314 chars]
	I0916 11:13:11.508175 1501462 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:11.508232 1501462 pod_ready.go:82] duration metric: took 53.801702ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.508260 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:11.508364 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:11.508400 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.508422 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.508442 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.522690 1501462 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 11:13:11.522762 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.522785 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.522804 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.522822 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.522853 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.522871 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.522888 1501462 round_trippers.go:580]     Audit-Id: fe6a3513-6da4-4f14-bc14-24cea94c87b9
	I0916 11:13:11.526309 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:11.527037 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:11.527085 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:11.527109 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:11.527127 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:11.532854 1501462 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:13:11.532927 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:11.532974 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:11.533006 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:11.533024 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:11.533042 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:11.533060 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:11 GMT
	I0916 11:13:11.533095 1501462 round_trippers.go:580]     Audit-Id: 4a853ee0-66b9-4760-bb5e-051fdec6255c
	I0916 11:13:11.537407 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:12.009204 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:12.009341 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:12.009435 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:12.009457 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:12.013811 1501462 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:13:12.013912 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:12.013939 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:12.013972 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:12.013992 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:12.014012 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:12 GMT
	I0916 11:13:12.014033 1501462 round_trippers.go:580]     Audit-Id: 1f00fbb5-31fa-46c6-85bb-ada1f30260b5
	I0916 11:13:12.014069 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:12.014736 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:12.015580 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:12.015629 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:12.015661 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:12.015680 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:12.018587 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:12.018614 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:12.018623 1501462 round_trippers.go:580]     Audit-Id: d62f2b49-6f2a-42a0-ac95-7ac2eebe7ed4
	I0916 11:13:12.018645 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:12.018654 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:12.018658 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:12.018661 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:12.018664 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:12 GMT
	I0916 11:13:12.019118 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:12.508776 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:12.508806 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:12.508815 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:12.508822 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:12.511942 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:12.511977 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:12.511986 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:12.511992 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:12.511997 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:12.512001 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:12.512008 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:12 GMT
	I0916 11:13:12.512017 1501462 round_trippers.go:580]     Audit-Id: 4379f9c1-0da3-4e31-b9b3-3faf3d5db86c
	I0916 11:13:12.512692 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:12.513344 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:12.513365 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:12.513374 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:12.513378 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:12.515968 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:12.515994 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:12.516004 1501462 round_trippers.go:580]     Audit-Id: 71a2c33b-2d51-4461-8439-875fba626487
	I0916 11:13:12.516010 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:12.516018 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:12.516022 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:12.516026 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:12.516030 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:12 GMT
	I0916 11:13:12.516252 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:13.008934 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:13.008963 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:13.008973 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:13.008979 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:13.011459 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:13.011524 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:13.011546 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:13.011568 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:13.011588 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:13 GMT
	I0916 11:13:13.011606 1501462 round_trippers.go:580]     Audit-Id: 0af0505d-e88f-4ac8-833c-f299d63ef333
	I0916 11:13:13.011624 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:13.011642 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:13.012025 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:13.012740 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:13.012762 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:13.012772 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:13.012778 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:13.015032 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:13.015095 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:13.015118 1501462 round_trippers.go:580]     Audit-Id: d731afac-80d4-4303-8272-ebc06a3105cf
	I0916 11:13:13.015137 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:13.015154 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:13.015186 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:13.015204 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:13.015221 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:13 GMT
	I0916 11:13:13.015382 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:13.509068 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:13.509094 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:13.509105 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:13.509110 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:13.512537 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:13.512566 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:13.512575 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:13 GMT
	I0916 11:13:13.512581 1501462 round_trippers.go:580]     Audit-Id: 64c956ce-a12b-4ee8-a1a7-30ce831a7deb
	I0916 11:13:13.512617 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:13.512627 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:13.512632 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:13.512643 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:13.513253 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:13.513966 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:13.514017 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:13.514041 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:13.514065 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:13.516773 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:13.516797 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:13.516805 1501462 round_trippers.go:580]     Audit-Id: 37c96e9b-f35f-4eb9-b4d0-9b9c6ff5997b
	I0916 11:13:13.516809 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:13.516812 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:13.516815 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:13.516818 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:13.516821 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:13 GMT
	I0916 11:13:13.517420 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:13.517943 1501462 pod_ready.go:103] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:14.009529 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:14.009578 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:14.009588 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:14.009592 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:14.012803 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:14.012831 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:14.012840 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:14.012846 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:14 GMT
	I0916 11:13:14.012870 1501462 round_trippers.go:580]     Audit-Id: 2446ec00-1c54-4bc8-be0b-cba6bfc7092e
	I0916 11:13:14.012885 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:14.012889 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:14.012892 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:14.013118 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:14.013748 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:14.013767 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:14.013777 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:14.013783 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:14.016178 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:14.016204 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:14.016212 1501462 round_trippers.go:580]     Audit-Id: 580ff0f8-e069-499a-80d9-d33e19849c69
	I0916 11:13:14.016216 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:14.016218 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:14.016221 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:14.016225 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:14.016227 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:14 GMT
	I0916 11:13:14.016660 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:14.508702 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:14.508728 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:14.508739 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:14.508743 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:14.511351 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:14.511435 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:14.511469 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:14.511490 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:14.511524 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:14.511555 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:14 GMT
	I0916 11:13:14.511573 1501462 round_trippers.go:580]     Audit-Id: 3fd78727-4d89-4b54-b47f-6b61f6a8dde1
	I0916 11:13:14.511611 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:14.511858 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:14.512512 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:14.512539 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:14.512548 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:14.512553 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:14.517433 1501462 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:13:14.517458 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:14.517477 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:14.517482 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:14.517487 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:14.517491 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:14 GMT
	I0916 11:13:14.517495 1501462 round_trippers.go:580]     Audit-Id: fa2e500f-ceed-4e68-a36f-93c776ed3bac
	I0916 11:13:14.517498 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:14.517782 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:15.014546 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:15.014574 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:15.014585 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:15.014590 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:15.028495 1501462 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 11:13:15.028582 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:15.028616 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:15.028634 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:15.028717 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:15.028750 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:15.028768 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:15 GMT
	I0916 11:13:15.028802 1501462 round_trippers.go:580]     Audit-Id: 6fc3e6c9-5e42-461e-aac1-6426811c1333
	I0916 11:13:15.040326 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:15.041055 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:15.041077 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:15.041087 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:15.041091 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:15.046407 1501462 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:13:15.046553 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:15.046594 1501462 round_trippers.go:580]     Audit-Id: 55414378-e6d8-48d3-a2b3-3b0a283519e7
	I0916 11:13:15.046627 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:15.046658 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:15.046686 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:15.046706 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:15.046735 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:15 GMT
	I0916 11:13:15.046896 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:15.509286 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:15.509317 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:15.509327 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:15.509332 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:15.512027 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:15.512146 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:15.512171 1501462 round_trippers.go:580]     Audit-Id: a04d43ca-ba6c-4dbb-9d42-042c8c681e93
	I0916 11:13:15.512208 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:15.512233 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:15.512250 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:15.512253 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:15.512256 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:15 GMT
	I0916 11:13:15.512500 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:15.513291 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:15.513311 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:15.513320 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:15.513325 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:15.515406 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:15.515430 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:15.515439 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:15.515444 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:15.515449 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:15.515451 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:15.515454 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:15 GMT
	I0916 11:13:15.515457 1501462 round_trippers.go:580]     Audit-Id: 85992450-5bb7-4f0e-a95d-f56ef16be4a7
	I0916 11:13:15.515977 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:16.008631 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:16.008662 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:16.008693 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:16.008700 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:16.011529 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:16.011557 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:16.011566 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:16.011571 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:16 GMT
	I0916 11:13:16.011574 1501462 round_trippers.go:580]     Audit-Id: f53a2f72-9f08-467c-afeb-d9d25d384580
	I0916 11:13:16.011577 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:16.011580 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:16.011614 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:16.011787 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:16.012446 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:16.012465 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:16.012474 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:16.012488 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:16.014850 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:16.014915 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:16.014946 1501462 round_trippers.go:580]     Audit-Id: e89d85a8-84f9-4895-8b20-f8c5c57d6343
	I0916 11:13:16.014967 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:16.014999 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:16.015021 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:16.015038 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:16.015055 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:16 GMT
	I0916 11:13:16.015200 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:16.015679 1501462 pod_ready.go:103] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"False"
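
Editor's note: the pod_ready.go lines are minikube's readiness wait surfacing each poll result; the timestamps show the pod being re-fetched roughly every 500ms until its Ready condition turns True. A sketch of that polling pattern with client-go follows; the helper name, interval, and timeout are illustrative, not minikube's actual code:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitPodReady (hypothetical helper) re-fetches the pod on a fixed interval and
// returns once its Ready condition reports True, mirroring the loop in the log.
func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			for _, cond := range pod.Status.Conditions {
				if cond.Type == corev1.PodReady {
					// A "Ready":"False" status, as in the log, keeps polling.
					return cond.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	err = waitPodReady(context.Background(), cs, "kube-system",
		"kube-controller-manager-multinode-654612")
	fmt.Println("ready:", err == nil)
}
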
	I0916 11:13:16.508616 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:16.508644 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:16.508657 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:16.508661 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:16.511317 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:16.511344 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:16.511353 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:16 GMT
	I0916 11:13:16.511359 1501462 round_trippers.go:580]     Audit-Id: 581558cf-f726-4476-a1f1-ceb6d9816e6f
	I0916 11:13:16.511391 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:16.511401 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:16.511404 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:16.511408 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:16.511869 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:16.512717 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:16.512740 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:16.512756 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:16.512761 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:16.518361 1501462 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:13:16.518386 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:16.518395 1501462 round_trippers.go:580]     Audit-Id: 92423d86-989d-4173-a713-04137889fd7b
	I0916 11:13:16.518405 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:16.518410 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:16.518414 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:16.518418 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:16.518421 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:16 GMT
	I0916 11:13:16.518740 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:17.009155 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:17.009184 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:17.009195 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:17.009201 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:17.011767 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:17.011793 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:17.011802 1501462 round_trippers.go:580]     Audit-Id: 5054c6c7-eb67-4ad9-9cd5-2c28c29593ed
	I0916 11:13:17.011805 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:17.011808 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:17.011810 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:17.011813 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:17.011828 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:17 GMT
	I0916 11:13:17.012139 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:17.012780 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:17.012802 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:17.012811 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:17.012817 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:17.014998 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:17.015020 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:17.015029 1501462 round_trippers.go:580]     Audit-Id: 5a14ec89-2ff7-4138-b04e-c51f9e4b976b
	I0916 11:13:17.015037 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:17.015045 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:17.015049 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:17.015052 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:17.015055 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:17 GMT
	I0916 11:13:17.015443 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:17.508569 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:17.508593 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:17.508603 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:17.508607 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:17.511101 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:17.511165 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:17.511188 1501462 round_trippers.go:580]     Audit-Id: f3e6b033-ceac-4ebc-be95-b0618340ef6a
	I0916 11:13:17.511208 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:17.511238 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:17.511257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:17.511276 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:17.511293 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:17 GMT
	I0916 11:13:17.511480 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:17.512077 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:17.512094 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:17.512103 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:17.512107 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:17.514358 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:17.514414 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:17.514453 1501462 round_trippers.go:580]     Audit-Id: 80d40ddc-755e-4b87-958b-02342b789a26
	I0916 11:13:17.514476 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:17.514495 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:17.514530 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:17.514540 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:17.514551 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:17 GMT
	I0916 11:13:17.514706 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:18.009948 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:18.009989 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:18.009999 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:18.010004 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:18.013212 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:18.013240 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:18.013250 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:18.013257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:18.013295 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:18 GMT
	I0916 11:13:18.013305 1501462 round_trippers.go:580]     Audit-Id: 37d55bf0-cce7-4176-868c-40262ed7b9f4
	I0916 11:13:18.013309 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:18.013312 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:18.013560 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:18.014219 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:18.014239 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:18.014248 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:18.014252 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:18.016531 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:18.016604 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:18.016641 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:18 GMT
	I0916 11:13:18.016665 1501462 round_trippers.go:580]     Audit-Id: 78231e0f-2e6c-4e37-930c-fd900f03ac85
	I0916 11:13:18.016712 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:18.016722 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:18.016742 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:18.016749 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:18.016920 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:18.017373 1501462 pod_ready.go:103] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:18.508896 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:18.508967 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:18.508983 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:18.508989 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:18.511400 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:18.511425 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:18.511432 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:18.511435 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:18.511438 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:18.511440 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:18.511445 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:18 GMT
	I0916 11:13:18.511447 1501462 round_trippers.go:580]     Audit-Id: fbbcb28f-e7cf-4885-bda1-a1697dd06762
	I0916 11:13:18.511693 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:18.512297 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:18.512316 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:18.512325 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:18.512332 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:18.514295 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:18.514313 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:18.514321 1501462 round_trippers.go:580]     Audit-Id: be175dd2-5871-4b93-b5b0-218acbe695fb
	I0916 11:13:18.514325 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:18.514327 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:18.514330 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:18.514333 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:18.514336 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:18 GMT
	I0916 11:13:18.514671 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:19.008547 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:19.008581 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:19.008596 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:19.008601 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:19.010980 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:19.011048 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:19.011072 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:19.011091 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:19 GMT
	I0916 11:13:19.011120 1501462 round_trippers.go:580]     Audit-Id: 28132f1e-5510-4c7d-8e87-bf1b8a1f9a43
	I0916 11:13:19.011140 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:19.011156 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:19.011172 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:19.011433 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:19.012099 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:19.012150 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:19.012165 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:19.012170 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:19.014282 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:19.014346 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:19.014368 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:19.014387 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:19.014425 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:19 GMT
	I0916 11:13:19.014446 1501462 round_trippers.go:580]     Audit-Id: 6b5ff937-6fb4-456e-8e5a-420cad83cc04
	I0916 11:13:19.014464 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:19.014482 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:19.014639 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:19.509146 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:19.509174 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:19.509184 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:19.509190 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:19.511507 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:19.511565 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:19.511596 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:19.511615 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:19.511647 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:19.511666 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:19.511670 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:19 GMT
	I0916 11:13:19.511673 1501462 round_trippers.go:580]     Audit-Id: 1b1d193e-84b6-42bc-ae7d-96e6fcee7d82
	I0916 11:13:19.511904 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:19.512510 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:19.512530 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:19.512541 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:19.512545 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:19.514602 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:19.514626 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:19.514635 1501462 round_trippers.go:580]     Audit-Id: 7c7057e5-ca1d-4b14-a9ae-9b5f803ac9d2
	I0916 11:13:19.514643 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:19.514647 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:19.514651 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:19.514655 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:19.514658 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:19 GMT
	I0916 11:13:19.514893 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:20.012176 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:20.012203 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:20.012214 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:20.012224 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:20.018694 1501462 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:13:20.018723 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:20.018732 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:20.018738 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:20.018742 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:20 GMT
	I0916 11:13:20.018747 1501462 round_trippers.go:580]     Audit-Id: 01dc8658-9108-48a7-bfc3-c4f399110504
	I0916 11:13:20.018752 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:20.018755 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:20.018950 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:20.019635 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:20.019647 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:20.019658 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:20.019662 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:20.023720 1501462 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:13:20.023744 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:20.023753 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:20.023758 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:20 GMT
	I0916 11:13:20.023769 1501462 round_trippers.go:580]     Audit-Id: d72f752d-d0b9-41db-9b90-e00932ca170a
	I0916 11:13:20.023772 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:20.023775 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:20.023778 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:20.023909 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:20.024333 1501462 pod_ready.go:103] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:20.508554 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:20.508577 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:20.508587 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:20.508591 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:20.511244 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:20.511278 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:20.511289 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:20.511295 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:20.511299 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:20 GMT
	I0916 11:13:20.511302 1501462 round_trippers.go:580]     Audit-Id: c6c27eec-760e-4488-afd4-ff1e53a613a0
	I0916 11:13:20.511305 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:20.511310 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:20.511515 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:20.512111 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:20.512130 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:20.512139 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:20.512143 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:20.514255 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:20.514326 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:20.514353 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:20.514371 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:20 GMT
	I0916 11:13:20.514405 1501462 round_trippers.go:580]     Audit-Id: ca0d4ede-da77-4834-ad57-022214e560d0
	I0916 11:13:20.514439 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:20.514456 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:20.514487 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:20.514627 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel
	I0916 11:13:21.009150 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:21.009176 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:21.009187 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:21.009192 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:21.011830 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:21.011946 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:21.011963 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:21.011970 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:21.011974 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:21.011977 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:21 GMT
	I0916 11:13:21.011996 1501462 round_trippers.go:580]     Audit-Id: bf4905b3-2a66-4def-ba3c-50bf44061214
	I0916 11:13:21.012000 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:21.012191 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:21.012841 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:21.012857 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:21.012866 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:21.012871 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:21.015117 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:21.015191 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:21.015214 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:21 GMT
	I0916 11:13:21.015235 1501462 round_trippers.go:580]     Audit-Id: 3f657d32-d27f-4238-a6e2-ccd0131cd9ac
	I0916 11:13:21.015268 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:21.015295 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:21.015313 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:21.015325 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:21.015453 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:21.508495 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:21.508524 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:21.508531 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:21.508538 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:21.510905 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:21.510984 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:21.511030 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:21.511060 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:21.511082 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:21 GMT
	I0916 11:13:21.511102 1501462 round_trippers.go:580]     Audit-Id: 36923697-4bcb-4af5-bcad-c73746ff9264
	I0916 11:13:21.511123 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:21.511141 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:21.511301 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:21.511915 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:21.511930 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:21.511939 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:21.511944 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:21.514021 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:21.514041 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:21.514049 1501462 round_trippers.go:580]     Audit-Id: 5305ccc7-b4dc-4d24-9a7f-09fe8adf76db
	I0916 11:13:21.514059 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:21.514063 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:21.514066 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:21.514069 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:21.514073 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:21 GMT
	I0916 11:13:21.514322 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:22.012177 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:22.012226 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:22.012240 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:22.012253 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:22.015111 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:22.015191 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:22.015229 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:22.015245 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:22 GMT
	I0916 11:13:22.015250 1501462 round_trippers.go:580]     Audit-Id: a0b75bc2-1019-4adf-bfa1-373ebd1138de
	I0916 11:13:22.015257 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:22.015261 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:22.015265 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:22.015480 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:22.016293 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:22.016310 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:22.016321 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:22.016326 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:22.018735 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:22.018762 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:22.018772 1501462 round_trippers.go:580]     Audit-Id: cec14517-73de-4598-b5f0-a5eb294c87ae
	I0916 11:13:22.018776 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:22.018780 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:22.018783 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:22.018786 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:22.018789 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:22 GMT
	I0916 11:13:22.019113 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:22.508542 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:22.508566 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:22.508576 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:22.508581 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:22.510968 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:22.510996 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:22.511005 1501462 round_trippers.go:580]     Audit-Id: 54c15b7d-5098-4d97-b77a-496afefd24e1
	I0916 11:13:22.511008 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:22.511011 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:22.511016 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:22.511019 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:22.511023 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:22 GMT
	I0916 11:13:22.511306 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:22.511901 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:22.511918 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:22.511926 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:22.511932 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:22.513958 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:22.513991 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:22.513999 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:22.514006 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:22.514013 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:22 GMT
	I0916 11:13:22.514016 1501462 round_trippers.go:580]     Audit-Id: 546023d9-653b-4087-a6a6-3fe371cc23c0
	I0916 11:13:22.514019 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:22.514022 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:22.514459 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:22.514876 1501462 pod_ready.go:103] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:23.009254 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:23.009281 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:23.009290 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:23.009294 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:23.011850 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:23.011874 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:23.011884 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:23.011891 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:23.011895 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:23 GMT
	I0916 11:13:23.011898 1501462 round_trippers.go:580]     Audit-Id: a373cd0f-f8bb-4c39-9a4b-e0b2e4832446
	I0916 11:13:23.011902 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:23.011905 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:23.012253 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:23.012882 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:23.012901 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:23.012911 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:23.012917 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:23.015436 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:23.015464 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:23.015475 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:23.015502 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:23.015505 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:23 GMT
	I0916 11:13:23.015508 1501462 round_trippers.go:580]     Audit-Id: 008d2e6e-0afe-42f2-8613-732b844de318
	I0916 11:13:23.015510 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:23.015514 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:23.015927 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:23.509071 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:23.509099 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:23.509106 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:23.509111 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:23.511588 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:23.511616 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:23.511625 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:23.511631 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:23.511635 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:23.511637 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:23.511640 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:23 GMT
	I0916 11:13:23.511643 1501462 round_trippers.go:580]     Audit-Id: 63738ca1-36e6-4b1a-9357-a1e488637b90
	I0916 11:13:23.511920 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:23.512521 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:23.512537 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:23.512546 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:23.512550 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:23.514773 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:23.514839 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:23.514861 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:23 GMT
	I0916 11:13:23.514877 1501462 round_trippers.go:580]     Audit-Id: af04d11a-c9f5-45fb-bb2a-6148321439d4
	I0916 11:13:23.514895 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:23.514929 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:23.514946 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:23.514964 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:23.515131 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:24.009365 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:24.009393 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.009414 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.009420 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.012295 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.012384 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.012408 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.012425 1501462 round_trippers.go:580]     Audit-Id: 863ef9b2-c070-46e3-befb-cfa6bd3a3c5a
	I0916 11:13:24.012445 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.012473 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.012491 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.012509 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.012725 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"659","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 9159 chars]
	I0916 11:13:24.013404 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:24.013423 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.013433 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.013437 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.015998 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.016023 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.016032 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.016036 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.016040 1501462 round_trippers.go:580]     Audit-Id: 096b8057-f986-48e5-82d9-c345ded55f1f
	I0916 11:13:24.016044 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.016048 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.016051 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.016189 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:24.509499 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:13:24.509526 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.509536 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.509541 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.512060 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.512085 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.512094 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.512098 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.512102 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.512107 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.512110 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.512113 1501462 round_trippers.go:580]     Audit-Id: 52dde0dd-de59-4746-ba7e-ffbbd2b67ef3
	I0916 11:13:24.512666 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"761","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8897 chars]
	I0916 11:13:24.513275 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:24.513294 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.513303 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.513308 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.515344 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.515365 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.515374 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.515378 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.515383 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.515387 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.515390 1501462 round_trippers.go:580]     Audit-Id: 39416e93-557d-48a7-ac48-22adff1ad0fa
	I0916 11:13:24.515393 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.515576 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:24.516015 1501462 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:24.516035 1501462 pod_ready.go:82] duration metric: took 13.007751452s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.516047 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.516125 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:13:24.516141 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.516149 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.516156 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.518395 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.518477 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.518501 1501462 round_trippers.go:580]     Audit-Id: 4887b7b1-8cef-4081-b41e-70282f896a73
	I0916 11:13:24.518527 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.518572 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.518590 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.518608 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.518644 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.518827 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"480","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:13:24.519378 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:13:24.519401 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.519409 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.519415 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.521479 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.521498 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.521506 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.521510 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.521513 1501462 round_trippers.go:580]     Audit-Id: c9dbe8f0-a3f5-4a8b-8d03-4f845fa13aa8
	I0916 11:13:24.521516 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.521519 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.521522 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.521638 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"549","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6102 chars]
	I0916 11:13:24.522025 1501462 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:24.522037 1501462 pod_ready.go:82] duration metric: took 5.97756ms for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.522047 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.522108 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:13:24.522113 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.522120 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.522123 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.524133 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:24.524152 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.524160 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.524166 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.524172 1501462 round_trippers.go:580]     Audit-Id: cfa8b7a3-dbd2-4539-ad35-6f2ccc096dbd
	I0916 11:13:24.524175 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.524179 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.524182 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.524451 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"681","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:13:24.524992 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:24.525010 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.525019 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.525023 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.527014 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:24.527033 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.527041 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.527044 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.527047 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.527050 1501462 round_trippers.go:580]     Audit-Id: cfcfbfa2-acd2-4eb1-8e4c-a3a6270278ac
	I0916 11:13:24.527053 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.527055 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.527285 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:24.527709 1501462 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:24.527728 1501462 pod_ready.go:82] duration metric: took 5.675045ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.527751 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:24.527845 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:24.527853 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.527861 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.527866 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.529964 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.529984 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.529990 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.529993 1501462 round_trippers.go:580]     Audit-Id: 6bf19194-48c1-4e87-bb8f-f19899b0a76a
	I0916 11:13:24.529999 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.530002 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.530006 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.530009 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.530362 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:24.530987 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:24.531008 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:24.531018 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:24.531021 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:24.533233 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:24.533265 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:24.533274 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:24.533279 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:24 GMT
	I0916 11:13:24.533283 1501462 round_trippers.go:580]     Audit-Id: 4e37aad5-9443-4ad0-85e2-7d0b4dcaa13c
	I0916 11:13:24.533288 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:24.533291 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:24.533294 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:24.533626 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:25.027981 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:25.028010 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:25.028020 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:25.028026 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:25.030619 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:25.030645 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:25.030653 1501462 round_trippers.go:580]     Audit-Id: 327c7acb-45ea-4a6a-abb5-3c7e733bf359
	I0916 11:13:25.030659 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:25.030663 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:25.030666 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:25.030669 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:25.030673 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:25 GMT
	I0916 11:13:25.031152 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:25.031764 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:25.031783 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:25.031793 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:25.031799 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:25.034439 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:25.034522 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:25.034546 1501462 round_trippers.go:580]     Audit-Id: d11fa895-ec12-424d-9297-3a1ffa2130c0
	I0916 11:13:25.034592 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:25.034618 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:25.034638 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:25.034648 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:25.034651 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:25 GMT
	I0916 11:13:25.034834 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:25.528105 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:25.528130 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:25.528140 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:25.528144 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:25.530570 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:25.530608 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:25.530619 1501462 round_trippers.go:580]     Audit-Id: aa0a5a5d-c053-4068-ae2f-084f649880f0
	I0916 11:13:25.530623 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:25.530627 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:25.530631 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:25.530635 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:25.530638 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:25 GMT
	I0916 11:13:25.530825 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:25.531375 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:25.531392 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:25.531402 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:25.531409 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:25.533767 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:25.533854 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:25.533906 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:25.533951 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:25.533962 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:25 GMT
	I0916 11:13:25.533966 1501462 round_trippers.go:580]     Audit-Id: 5be25b27-41ab-4930-896b-813d90194d6c
	I0916 11:13:25.533969 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:25.533972 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:25.534122 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:26.028699 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:26.028728 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:26.028738 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:26.028742 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:26.031193 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:26.031214 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:26.031222 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:26.031229 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:26 GMT
	I0916 11:13:26.031232 1501462 round_trippers.go:580]     Audit-Id: 84d733d2-056f-4296-83c7-2b75fbf45b26
	I0916 11:13:26.031235 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:26.031238 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:26.031241 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:26.031387 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:26.031937 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:26.031956 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:26.031965 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:26.031970 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:26.034018 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:26.034039 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:26.034048 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:26.034052 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:26 GMT
	I0916 11:13:26.034056 1501462 round_trippers.go:580]     Audit-Id: 8ab37595-a932-46cb-ac63-96dc1586bd58
	I0916 11:13:26.034066 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:26.034069 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:26.034072 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:26.034349 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:26.528007 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:26.528043 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:26.528054 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:26.528058 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:26.530829 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:26.530856 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:26.530867 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:26.530871 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:26.530907 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:26 GMT
	I0916 11:13:26.530919 1501462 round_trippers.go:580]     Audit-Id: 289947ec-1807-496b-b793-84e0960291be
	I0916 11:13:26.530922 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:26.530924 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:26.531128 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:26.531701 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:26.531718 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:26.531727 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:26.531732 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:26.533912 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:26.533977 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:26.534000 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:26.534017 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:26.534053 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:26.534076 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:26 GMT
	I0916 11:13:26.534092 1501462 round_trippers.go:580]     Audit-Id: 5121c950-e499-433f-9904-f2ac783fae21
	I0916 11:13:26.534110 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:26.534231 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:26.534674 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
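The pod_ready.go messages mark each iteration of a readiness wait: the log shows a GET of the kube-proxy pod followed by a GET of its node roughly every 500ms until the pod's Ready condition turns True. Below is a minimal client-go sketch of such a wait loop, assuming a standard kubeconfig at the default location; waitPodReady is an illustrative name, not minikube's actual helper.

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitPodReady re-fetches the pod about every 500ms, the same cadence
// visible in the timestamps above, until its Ready condition is True
// or the context expires.
func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	for {
		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
				return nil
			}
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(500 * time.Millisecond):
		}
	}
}

func main() {
	// Build a client from the user's kubeconfig (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()
	if err := waitPodReady(ctx, cs, "kube-system", "kube-proxy-vf648"); err != nil {
		panic(err)
	}
	fmt.Println("pod is Ready")
}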
	I0916 11:13:27.028831 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:27.028861 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:27.028871 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:27.028875 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:27.031403 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:27.031424 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:27.031433 1501462 round_trippers.go:580]     Audit-Id: 02e6f3d0-788c-4960-918d-9c7a05c0f247
	I0916 11:13:27.031437 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:27.031440 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:27.031443 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:27.031446 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:27.031448 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:27 GMT
	I0916 11:13:27.031572 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:27.032116 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:27.032129 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:27.032138 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:27.032142 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:27.034453 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:27.034481 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:27.034497 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:27.034527 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:27 GMT
	I0916 11:13:27.034537 1501462 round_trippers.go:580]     Audit-Id: 44045823-fc9c-4706-9145-bba7c5485a1c
	I0916 11:13:27.034546 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:27.034558 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:27.034561 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:27.034702 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:27.528004 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:27.528034 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:27.528044 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:27.528048 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:27.530894 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:27.530924 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:27.530933 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:27.530939 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:27 GMT
	I0916 11:13:27.530943 1501462 round_trippers.go:580]     Audit-Id: b067c9d3-90b4-44a7-8fa4-f8f9ad58e678
	I0916 11:13:27.530946 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:27.530950 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:27.530953 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:27.531562 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:27.532198 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:27.532224 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:27.532234 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:27.532238 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:27.534893 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:27.534913 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:27.534921 1501462 round_trippers.go:580]     Audit-Id: 302656b2-3b0d-4cda-a6f2-c515d23aad42
	I0916 11:13:27.534926 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:27.534929 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:27.534932 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:27.534935 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:27.534937 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:27 GMT
	I0916 11:13:27.535037 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:28.028072 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:28.028102 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:28.028112 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:28.028116 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:28.030699 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:28.030734 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:28.030748 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:28.030754 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:28.030758 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:28.030762 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:28.030765 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:28 GMT
	I0916 11:13:28.030768 1501462 round_trippers.go:580]     Audit-Id: abe18486-fd31-435c-85bd-0287ea5691ed
	I0916 11:13:28.030956 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:28.031499 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:28.031518 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:28.031527 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:28.031532 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:28.033919 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:28.033943 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:28.033951 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:28.033956 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:28 GMT
	I0916 11:13:28.033960 1501462 round_trippers.go:580]     Audit-Id: 4d39f25d-6104-4668-a517-57d763bc99a9
	I0916 11:13:28.033964 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:28.033967 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:28.033970 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:28.034149 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:28.529007 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:28.529036 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:28.529046 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:28.529054 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:28.531370 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:28.531398 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:28.531407 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:28 GMT
	I0916 11:13:28.531411 1501462 round_trippers.go:580]     Audit-Id: b25c05f2-6325-4b97-aa4a-8bfae1c6e11c
	I0916 11:13:28.531414 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:28.531417 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:28.531420 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:28.531423 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:28.531596 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:28.532161 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:28.532178 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:28.532187 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:28.532191 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:28.534378 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:28.534404 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:28.534414 1501462 round_trippers.go:580]     Audit-Id: bfb78372-30cc-4dd9-a0b3-9b39995ff224
	I0916 11:13:28.534419 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:28.534426 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:28.534429 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:28.534432 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:28.534435 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:28 GMT
	I0916 11:13:28.534531 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:28.534925 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:29.028727 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:29.028754 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:29.028763 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:29.028769 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:29.031133 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:29.031157 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:29.031166 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:29.031171 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:29 GMT
	I0916 11:13:29.031174 1501462 round_trippers.go:580]     Audit-Id: 2deb090d-fc07-43fc-b146-0d7f517b55b7
	I0916 11:13:29.031178 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:29.031182 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:29.031185 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:29.031310 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:29.031862 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:29.031878 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:29.031886 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:29.031891 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:29.034058 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:29.034124 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:29.034147 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:29.034166 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:29.034200 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:29.034223 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:29.034242 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:29 GMT
	I0916 11:13:29.034246 1501462 round_trippers.go:580]     Audit-Id: 8453fd38-a685-4363-8298-a9ce6d7119db
	I0916 11:13:29.034373 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:29.528916 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:29.528941 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:29.528951 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:29.528956 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:29.531399 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:29.531423 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:29.531431 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:29 GMT
	I0916 11:13:29.531436 1501462 round_trippers.go:580]     Audit-Id: df83b79e-e743-42a5-b363-14fe3648d94e
	I0916 11:13:29.531438 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:29.531442 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:29.531445 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:29.531448 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:29.531637 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:29.532223 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:29.532244 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:29.532254 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:29.532259 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:29.534446 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:29.534472 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:29.534482 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:29.534495 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:29 GMT
	I0916 11:13:29.534504 1501462 round_trippers.go:580]     Audit-Id: 05738abf-4055-46ea-b043-1decebd0680d
	I0916 11:13:29.534510 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:29.534514 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:29.534517 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:29.534936 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:30.028116 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:30.028149 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:30.028170 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:30.028176 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:30.031192 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:30.031265 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:30.031289 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:30.031309 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:30.031337 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:30.031356 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:30.031375 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:30 GMT
	I0916 11:13:30.031393 1501462 round_trippers.go:580]     Audit-Id: 23b1b4dc-e73c-4a6a-a33f-b0bbb97e0e49
	I0916 11:13:30.031623 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:30.032279 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:30.032306 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:30.032317 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:30.032321 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:30.035068 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:30.035101 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:30.035111 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:30.035116 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:30 GMT
	I0916 11:13:30.035119 1501462 round_trippers.go:580]     Audit-Id: b2c3b9b0-6b8b-404e-ac59-10849b6a286c
	I0916 11:13:30.035124 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:30.035127 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:30.035130 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:30.035230 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:30.528036 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:30.528062 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:30.528071 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:30.528088 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:30.530550 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:30.530579 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:30.530588 1501462 round_trippers.go:580]     Audit-Id: fc173d55-ac6e-4433-8c0e-d778a1eaf1fd
	I0916 11:13:30.530592 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:30.530596 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:30.530600 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:30.530603 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:30.530606 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:30 GMT
	I0916 11:13:30.530733 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:30.531278 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:30.531294 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:30.531303 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:30.531307 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:30.533379 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:30.533414 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:30.533424 1501462 round_trippers.go:580]     Audit-Id: e0cd98cd-5121-407f-b540-30dfbf84cb86
	I0916 11:13:30.533428 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:30.533431 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:30.533434 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:30.533437 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:30.533440 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:30 GMT
	I0916 11:13:30.533655 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:31.028121 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:31.028147 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:31.028157 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:31.028160 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:31.030566 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:31.030677 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:31.030699 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:31.030711 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:31.030718 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:31.030721 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:31.030726 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:31 GMT
	I0916 11:13:31.030729 1501462 round_trippers.go:580]     Audit-Id: 3ff56357-73c9-4f3d-8ab5-b2d5b9c19e46
	I0916 11:13:31.030868 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:31.031432 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:31.031452 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:31.031461 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:31.031467 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:31.033586 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:31.033611 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:31.033621 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:31 GMT
	I0916 11:13:31.033625 1501462 round_trippers.go:580]     Audit-Id: 70a56b58-96b0-4ffc-bf37-bd839a811ab6
	I0916 11:13:31.033628 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:31.033631 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:31.033635 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:31.033638 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:31.034067 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:31.034472 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:31.528406 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:31.528435 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:31.528446 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:31.528452 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:31.531059 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:31.531083 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:31.531092 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:31.531098 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:31.531101 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:31.531104 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:31 GMT
	I0916 11:13:31.531107 1501462 round_trippers.go:580]     Audit-Id: 308397e2-c905-4fe1-9f72-a74c8f1b8cbe
	I0916 11:13:31.531109 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:31.531582 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:31.532150 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:31.532170 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:31.532189 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:31.532194 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:31.534537 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:31.534562 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:31.534569 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:31.534573 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:31.534575 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:31.534579 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:31.534582 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:31 GMT
	I0916 11:13:31.534592 1501462 round_trippers.go:580]     Audit-Id: 36ab2bc0-6b0b-4926-b8df-c8521e997069
	I0916 11:13:31.534689 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:32.028944 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:32.028973 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:32.028983 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:32.028989 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:32.031350 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:32.031382 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:32.031402 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:32.031406 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:32.031409 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:32 GMT
	I0916 11:13:32.031412 1501462 round_trippers.go:580]     Audit-Id: 505a8169-3609-4d82-8de5-2cc1d99695a2
	I0916 11:13:32.031415 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:32.031417 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:32.031544 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:32.032102 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:32.032119 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:32.032128 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:32.032131 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:32.034199 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:32.034222 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:32.034230 1501462 round_trippers.go:580]     Audit-Id: 909592b8-d15a-4ce1-836d-38ead0d0a8b3
	I0916 11:13:32.034234 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:32.034237 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:32.034242 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:32.034245 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:32.034248 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:32 GMT
	I0916 11:13:32.034344 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:32.528522 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:32.528550 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:32.528564 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:32.528570 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:32.531056 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:32.531086 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:32.531094 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:32.531107 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:32 GMT
	I0916 11:13:32.531126 1501462 round_trippers.go:580]     Audit-Id: 34614d91-7bca-4027-99fc-97abb4489ff2
	I0916 11:13:32.531138 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:32.531142 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:32.531145 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:32.531281 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:32.531845 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:32.531866 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:32.531875 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:32.531881 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:32.534154 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:32.534175 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:32.534184 1501462 round_trippers.go:580]     Audit-Id: dd05b51c-f313-48ae-bad0-408d2918bfb9
	I0916 11:13:32.534189 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:32.534192 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:32.534195 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:32.534198 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:32.534200 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:32 GMT
	I0916 11:13:32.534608 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:33.028081 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:33.028111 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:33.028121 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:33.028125 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:33.030683 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:33.030731 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:33.030739 1501462 round_trippers.go:580]     Audit-Id: 71c9427b-595b-4e6f-9fad-bf8704249f4b
	I0916 11:13:33.030744 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:33.030753 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:33.030755 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:33.030758 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:33.030761 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:33 GMT
	I0916 11:13:33.030868 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:33.031400 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:33.031417 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:33.031424 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:33.031428 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:33.033743 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:33.033772 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:33.033782 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:33.033788 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:33.033791 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:33.033794 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:33 GMT
	I0916 11:13:33.033797 1501462 round_trippers.go:580]     Audit-Id: 002d1714-e342-4608-809e-6f40f80c2809
	I0916 11:13:33.033799 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:33.034165 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:33.034567 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:33.528665 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:33.528721 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:33.528732 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:33.528736 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:33.531116 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:33.531196 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:33.531261 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:33.531285 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:33.531304 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:33 GMT
	I0916 11:13:33.531327 1501462 round_trippers.go:580]     Audit-Id: 5f5bc330-3979-4b00-bb05-a4123b329c90
	I0916 11:13:33.531345 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:33.531350 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:33.531486 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:33.532029 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:33.532049 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:33.532059 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:33.532065 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:33.534187 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:33.534212 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:33.534221 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:33.534225 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:33 GMT
	I0916 11:13:33.534230 1501462 round_trippers.go:580]     Audit-Id: 6ce64399-2925-4b4c-9568-49d3c6cdd56e
	I0916 11:13:33.534232 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:33.534235 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:33.534238 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:33.534336 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:34.028063 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:34.028099 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:34.028109 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:34.028115 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:34.031179 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:34.031213 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:34.031230 1501462 round_trippers.go:580]     Audit-Id: 8ca985bd-ac2f-4803-82c8-3a9286a4ada9
	I0916 11:13:34.031236 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:34.031240 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:34.031252 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:34.031257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:34.031263 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:34 GMT
	I0916 11:13:34.031407 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:34.032021 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:34.032048 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:34.032057 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:34.032061 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:34.034452 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:34.034478 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:34.034498 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:34.034502 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:34.034506 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:34 GMT
	I0916 11:13:34.034514 1501462 round_trippers.go:580]     Audit-Id: 6cd89bbe-bca9-4421-a155-35654d0d5212
	I0916 11:13:34.034517 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:34.034520 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:34.034620 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:34.528156 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:34.528180 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:34.528190 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:34.528196 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:34.530983 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:34.531006 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:34.531014 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:34.531019 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:34.531022 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:34.531025 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:34.531028 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:34 GMT
	I0916 11:13:34.531030 1501462 round_trippers.go:580]     Audit-Id: 842b21c6-4a40-4b98-bef9-bcab84d3fbd7
	I0916 11:13:34.531166 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:34.531752 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:34.531780 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:34.531789 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:34.531794 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:34.534114 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:34.534140 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:34.534148 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:34.534153 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:34 GMT
	I0916 11:13:34.534157 1501462 round_trippers.go:580]     Audit-Id: b661ee48-fbb9-4866-a196-89cec70416d0
	I0916 11:13:34.534161 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:34.534165 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:34.534171 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:34.534284 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:35.028152 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:35.028183 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:35.028195 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:35.028202 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:35.030933 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:35.030958 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:35.030967 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:35 GMT
	I0916 11:13:35.030972 1501462 round_trippers.go:580]     Audit-Id: afe10cfa-e3e2-4fa9-85a1-59392fc58267
	I0916 11:13:35.030975 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:35.030978 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:35.030983 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:35.030986 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:35.031146 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:35.031732 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:35.031754 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:35.031763 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:35.031769 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:35.034173 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:35.034207 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:35.034217 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:35.034223 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:35.034231 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:35 GMT
	I0916 11:13:35.034235 1501462 round_trippers.go:580]     Audit-Id: 134d0d74-1a18-4a22-b576-c6097a156400
	I0916 11:13:35.034238 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:35.034241 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:35.034597 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:35.035022 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:35.528113 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:35.528141 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:35.528152 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:35.528158 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:35.530525 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:35.530550 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:35.530560 1501462 round_trippers.go:580]     Audit-Id: b6e8350c-2328-4c0c-a3d5-5e9b9a486ae0
	I0916 11:13:35.530564 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:35.530568 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:35.530571 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:35.530574 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:35.530577 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:35 GMT
	I0916 11:13:35.530814 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:35.531346 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:35.531364 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:35.531373 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:35.531379 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:35.533622 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:35.533696 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:35.533705 1501462 round_trippers.go:580]     Audit-Id: 44bc5f40-2777-44b6-a747-fd159838414e
	I0916 11:13:35.533709 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:35.533712 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:35.533716 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:35.533721 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:35.533728 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:35 GMT
	I0916 11:13:35.533813 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:36.028061 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:36.028091 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:36.028100 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:36.028107 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:36.030713 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:36.030786 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:36.030807 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:36.030832 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:36.030867 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:36.030889 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:36.030908 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:36 GMT
	I0916 11:13:36.030925 1501462 round_trippers.go:580]     Audit-Id: de10bc18-8c9e-415a-bbdb-acf77c04321f
	I0916 11:13:36.031086 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:36.031650 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:36.031670 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:36.031679 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:36.031684 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:36.034041 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:36.034069 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:36.034079 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:36.034084 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:36.034087 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:36 GMT
	I0916 11:13:36.034090 1501462 round_trippers.go:580]     Audit-Id: c9f116ba-064f-485c-a068-a026f0a08d5d
	I0916 11:13:36.034093 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:36.034097 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:36.034218 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:36.528274 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:36.528299 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:36.528309 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:36.528314 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:36.531226 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:36.531253 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:36.531262 1501462 round_trippers.go:580]     Audit-Id: 815b0ced-bd76-4e34-aa77-00d3c1b487cc
	I0916 11:13:36.531266 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:36.531268 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:36.531271 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:36.531274 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:36.531277 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:36 GMT
	I0916 11:13:36.531517 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:36.532151 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:36.532171 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:36.532182 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:36.532187 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:36.534540 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:36.534573 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:36.534582 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:36.534586 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:36.534591 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:36.534594 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:36.534598 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:36 GMT
	I0916 11:13:36.534600 1501462 round_trippers.go:580]     Audit-Id: 03f358e6-5d22-40ff-b622-79148157f0c4
	I0916 11:13:36.534709 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:37.028022 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:37.028052 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:37.028063 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:37.028069 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:37.030747 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:37.030780 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:37.030789 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:37 GMT
	I0916 11:13:37.030794 1501462 round_trippers.go:580]     Audit-Id: e781f9e7-955d-4277-aef1-3bb3cd189579
	I0916 11:13:37.030798 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:37.030801 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:37.030805 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:37.030811 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:37.030957 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:37.031498 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:37.031530 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:37.031539 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:37.031543 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:37.034140 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:37.034165 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:37.034174 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:37.034179 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:37.034182 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:37.034185 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:37.034190 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:37 GMT
	I0916 11:13:37.034193 1501462 round_trippers.go:580]     Audit-Id: 340f27c8-3050-4e41-8ee7-7722d623183e
	I0916 11:13:37.034290 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:37.528168 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:37.528192 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:37.528202 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:37.528206 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:37.530613 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:37.530637 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:37.530646 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:37.530651 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:37.530655 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:37 GMT
	I0916 11:13:37.530658 1501462 round_trippers.go:580]     Audit-Id: f110bb6c-d460-41c6-a9aa-0aac30cf0609
	I0916 11:13:37.530661 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:37.530664 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:37.530812 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:37.531335 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:37.531346 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:37.531355 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:37.531358 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:37.533415 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:37.533490 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:37.533509 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:37.533515 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:37.533518 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:37.533521 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:37.533525 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:37 GMT
	I0916 11:13:37.533528 1501462 round_trippers.go:580]     Audit-Id: d3535f07-5a0b-416a-9042-1d90ea61e6d1
	I0916 11:13:37.533652 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:37.534090 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
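The lines up to this point are one complete iteration of the readiness wait: pod_ready.go fetches the kube-proxy pod and its node roughly every 500ms and logs the pod's Ready condition until it flips to True or the wait times out. The block below is a minimal sketch of that polling pattern using client-go and apimachinery's wait helpers; it is an illustration under stated assumptions, not minikube's actual pod_ready.go, and the name waitPodReady is hypothetical.

    // Minimal sketch of the ~500ms readiness poll visible in the log above.
    // Not minikube's actual pod_ready.go; waitPodReady is a hypothetical name.
    package poll

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
    )

    func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
        return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
            func(ctx context.Context) (bool, error) {
                pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
                if err != nil {
                    return false, err // stop on API errors; a lenient variant could return false, nil
                }
                for _, c := range pod.Status.Conditions {
                    if c.Type == corev1.PodReady {
                        // Mirrors the `pod "..." has status "Ready":"False"` lines above.
                        fmt.Printf("pod %q in %q namespace has status Ready: %s\n", name, ns, c.Status)
                        return c.Status == corev1.ConditionTrue, nil
                    }
                }
                return false, nil // no Ready condition yet; keep polling
            })
    }

Each false return simply schedules the next 500ms tick, which is why the log repeats the same pair of GETs with only the timestamps and Audit-Ids changing.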
	I0916 11:13:38.028864 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:38.028892 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:38.028902 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:38.028908 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:38.031514 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:38.031577 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:38.031593 1501462 round_trippers.go:580]     Audit-Id: 438de499-8421-4ac2-8aaf-c04f6e00685e
	I0916 11:13:38.031597 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:38.031601 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:38.031603 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:38.031608 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:38.031611 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:38 GMT
	I0916 11:13:38.032030 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:38.032649 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:38.032670 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:38.032697 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:38.032702 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:38.034976 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:38.034997 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:38.035006 1501462 round_trippers.go:580]     Audit-Id: daec1afc-8fe6-42da-91ff-a5a859f60f19
	I0916 11:13:38.035011 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:38.035014 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:38.035017 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:38.035020 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:38.035023 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:38 GMT
	I0916 11:13:38.035186 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:38.528247 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:38.528272 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:38.528281 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:38.528287 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:38.530841 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:38.530872 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:38.530881 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:38.530885 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:38.530889 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:38.530893 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:38.530896 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:38 GMT
	I0916 11:13:38.530898 1501462 round_trippers.go:580]     Audit-Id: 73643f55-4b75-4018-aeb6-d8f7eb4f3e65
	I0916 11:13:38.531110 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:38.531687 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:38.531705 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:38.531719 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:38.531723 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:38.534176 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:38.534243 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:38.534280 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:38.534304 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:38 GMT
	I0916 11:13:38.534325 1501462 round_trippers.go:580]     Audit-Id: 5bc0d83c-9a9a-4799-9e14-e1917680eb0a
	I0916 11:13:38.534359 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:38.534382 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:38.534401 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:38.534541 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:39.027975 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:39.027999 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:39.028009 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:39.028014 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:39.030538 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:39.030560 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:39.030568 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:39.030572 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:39.030575 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:39.030579 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:39 GMT
	I0916 11:13:39.030582 1501462 round_trippers.go:580]     Audit-Id: bf44e33c-671e-4c6f-b399-33bb77f45604
	I0916 11:13:39.030584 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:39.030725 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:39.031255 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:39.031266 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:39.031274 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:39.031278 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:39.033376 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:39.033401 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:39.033410 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:39 GMT
	I0916 11:13:39.033415 1501462 round_trippers.go:580]     Audit-Id: 1c2caf48-8156-494b-be0d-37c980987970
	I0916 11:13:39.033419 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:39.033429 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:39.033432 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:39.033435 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:39.033558 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:39.528050 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:39.528076 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:39.528086 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:39.528091 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:39.530457 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:39.530496 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:39.530506 1501462 round_trippers.go:580]     Audit-Id: bfedbd95-cf64-454f-aaef-9de48460ccc6
	I0916 11:13:39.530510 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:39.530514 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:39.530518 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:39.530521 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:39.530525 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:39 GMT
	I0916 11:13:39.531041 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:39.531576 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:39.531594 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:39.531604 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:39.531612 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:39.533585 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:39.533641 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:39.533650 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:39.533661 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:39.533676 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:39.533679 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:39 GMT
	I0916 11:13:39.533683 1501462 round_trippers.go:580]     Audit-Id: f414df29-c8d2-4a31-99aa-51a670ea093b
	I0916 11:13:39.533697 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:39.533786 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:39.534198 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
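The per-request Request Headers / Response Headers / Response Body records themselves come from client-go's debugging round tripper (transport/round_trippers.go), which emits progressively more detail as klog verbosity rises; at the level used for these logs, response bodies are included but truncated, hence the "[truncated N chars]" markers. The sketch below shows how such a program wires up klog flags so that running it with a high -v reproduces this output; the exact verbosity-to-detail mapping (commonly -v=8 for truncated bodies, -v=9 for full bodies) is an assumption drawn from client-go's source, not something this report states.

    // Sketch: register klog flags so running the binary with e.g. -v=8 makes
    // client-go's round_trippers.go emit the header/body records seen above.
    // (The -v=8/-v=9 detail levels are assumed client-go behavior.)
    package main

    import (
        "flag"

        "k8s.io/klog/v2"
    )

    func main() {
        klog.InitFlags(nil) // adds -v, -logtostderr, ... to the default FlagSet
        flag.Parse()
        // ... build a rest.Config / clientset and issue requests as usual;
        // client-go wraps the transport with the debug round tripper when
        // the verbosity is high enough, no extra code required.
    }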
	I0916 11:13:40.028833 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:40.028862 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:40.028872 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:40.028876 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:40.031861 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:40.031942 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:40.031953 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:40.031959 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:40.031963 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:40 GMT
	I0916 11:13:40.031966 1501462 round_trippers.go:580]     Audit-Id: 233edce5-0f02-419a-8b7d-607d8c2d70a9
	I0916 11:13:40.031970 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:40.031973 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:40.032383 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:40.033078 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:40.033096 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:40.033105 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:40.033109 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:40.035729 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:40.035755 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:40.035766 1501462 round_trippers.go:580]     Audit-Id: fc904453-0168-4e61-acd2-f8ac5f5318f9
	I0916 11:13:40.035772 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:40.035775 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:40.035779 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:40.035782 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:40.035785 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:40 GMT
	I0916 11:13:40.036012 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:40.528786 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:40.528811 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:40.528821 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:40.528825 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:40.531146 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:40.531172 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:40.531183 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:40 GMT
	I0916 11:13:40.531187 1501462 round_trippers.go:580]     Audit-Id: 8cb14204-e23e-44dd-b9d2-56dfa3e723af
	I0916 11:13:40.531191 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:40.531194 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:40.531197 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:40.531201 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:40.531384 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:40.531930 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:40.531949 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:40.531958 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:40.531962 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:40.533890 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:40.533918 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:40.533927 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:40.533932 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:40.533935 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:40 GMT
	I0916 11:13:40.533939 1501462 round_trippers.go:580]     Audit-Id: ac6e1760-d158-443f-898f-1d7cbfb8ca44
	I0916 11:13:40.533943 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:40.533946 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:40.534460 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:41.028136 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:41.028162 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:41.028172 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:41.028175 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:41.030520 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:41.030592 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:41.030614 1501462 round_trippers.go:580]     Audit-Id: 690ebd3a-b68c-43a9-baed-f83a8ae6007a
	I0916 11:13:41.030631 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:41.030663 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:41.030685 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:41.030731 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:41.030747 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:41 GMT
	I0916 11:13:41.030895 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:41.031441 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:41.031483 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:41.031498 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:41.031503 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:41.033605 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:41.033637 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:41.033646 1501462 round_trippers.go:580]     Audit-Id: 61e6df0e-822d-4fc8-98ff-431f4ee87a9d
	I0916 11:13:41.033650 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:41.033655 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:41.033658 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:41.033662 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:41.033666 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:41 GMT
	I0916 11:13:41.033778 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:41.528963 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:41.528989 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:41.528999 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:41.529004 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:41.531512 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:41.531536 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:41.531544 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:41.531549 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:41.531552 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:41 GMT
	I0916 11:13:41.531555 1501462 round_trippers.go:580]     Audit-Id: f82244a9-8f9d-4780-96f3-2b2e3ad37493
	I0916 11:13:41.531558 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:41.531561 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:41.531701 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:41.532341 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:41.532354 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:41.532363 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:41.532368 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:41.534530 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:41.534552 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:41.534561 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:41.534566 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:41 GMT
	I0916 11:13:41.534569 1501462 round_trippers.go:580]     Audit-Id: 4e3771a7-55bd-4de8-9817-d89548dc3391
	I0916 11:13:41.534572 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:41.534575 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:41.534577 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:41.534686 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:41.535071 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
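Note that every iteration issues two GETs: one for the pod and one for multinode-654612-m03, the node the pod is scheduled on — most likely so the wait can also consider the node's own Ready condition rather than the pod's alone. A small sketch of that node-side half of the check, under the same caveats as the earlier block (an illustration, not minikube's code; nodeReady is a hypothetical name):

    // Sketch of a node-side readiness check; nodeReady is hypothetical.
    package poll

    import corev1 "k8s.io/api/core/v1"

    // nodeReady reports whether the Node's Ready condition is True.
    func nodeReady(node *corev1.Node) bool {
        for _, c := range node.Status.Conditions {
            if c.Type == corev1.NodeReady {
                return c.Status == corev1.ConditionTrue
            }
        }
        return false
    }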
	I0916 11:13:42.028885 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:42.028911 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:42.028920 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:42.028924 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:42.031615 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:42.031639 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:42.031648 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:42.031652 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:42 GMT
	I0916 11:13:42.031657 1501462 round_trippers.go:580]     Audit-Id: ba4c6c59-1b8c-4512-bc2d-f60990ea128a
	I0916 11:13:42.031660 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:42.031663 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:42.031666 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:42.031787 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:42.032369 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:42.032383 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:42.032393 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:42.032397 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:42.034649 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:42.034673 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:42.034799 1501462 round_trippers.go:580]     Audit-Id: f5609422-5bb7-4698-98bf-30f459ada5f1
	I0916 11:13:42.034849 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:42.034862 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:42.034866 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:42.034870 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:42.034874 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:42 GMT
	I0916 11:13:42.035042 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:42.528879 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:42.528906 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:42.528917 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:42.528922 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:42.531271 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:42.531294 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:42.531303 1501462 round_trippers.go:580]     Audit-Id: 768900cc-0b33-4b56-b16e-2b4cf87c2719
	I0916 11:13:42.531308 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:42.531311 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:42.531313 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:42.531316 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:42.531319 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:42 GMT
	I0916 11:13:42.531447 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:42.532029 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:42.532040 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:42.532048 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:42.532053 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:42.534339 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:42.534366 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:42.534375 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:42.534379 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:42 GMT
	I0916 11:13:42.534382 1501462 round_trippers.go:580]     Audit-Id: dd14a8e0-953f-403a-ae42-0a410531345d
	I0916 11:13:42.534385 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:42.534388 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:42.534391 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:42.534490 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:43.028666 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:43.028708 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:43.028718 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:43.028724 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:43.031060 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:43.031090 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:43.031098 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:43.031103 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:43 GMT
	I0916 11:13:43.031106 1501462 round_trippers.go:580]     Audit-Id: 59dd36ea-6c88-4917-99da-f06c0fbbfae2
	I0916 11:13:43.031108 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:43.031111 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:43.031114 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:43.031231 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:43.031765 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:43.031782 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:43.031791 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:43.031798 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:43.033889 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:43.033911 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:43.033920 1501462 round_trippers.go:580]     Audit-Id: 014586d7-ac9b-48cf-9b72-fc9ba28f90b3
	I0916 11:13:43.033924 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:43.033927 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:43.033930 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:43.033933 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:43.033936 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:43 GMT
	I0916 11:13:43.034165 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:43.528949 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:43.528991 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:43.529001 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:43.529007 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:43.532781 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:43.532814 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:43.532825 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:43 GMT
	I0916 11:13:43.532829 1501462 round_trippers.go:580]     Audit-Id: 39a87456-d41a-48d1-a057-2d1f5889ef3c
	I0916 11:13:43.532832 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:43.532835 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:43.532837 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:43.532841 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:43.533088 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:43.533656 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:43.533674 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:43.533683 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:43.533686 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:43.535862 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:43.535883 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:43.535892 1501462 round_trippers.go:580]     Audit-Id: 69799e1d-cdea-4187-b655-8e8339163e99
	I0916 11:13:43.535896 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:43.535899 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:43.535903 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:43.535906 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:43.535909 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:43 GMT
	I0916 11:13:43.536183 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:43.536588 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
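The cycle above repeats below at a roughly 500ms cadence: minikube re-fetches the kube-proxy pod and its node, and pod_ready.go re-checks the pod's Ready condition, which is still "False" here. As a minimal client-go sketch of that readiness check, assuming a hard-coded kubeconfig path and pod name purely for illustration (this is not minikube's actual pod_ready implementation):

    // Poll a pod every 500ms until its Ready condition becomes True,
    // mirroring the request pattern visible in the log above.
    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // isPodReady reports whether the pod's Ready condition is True.
    func isPodReady(pod *corev1.Pod) bool {
        for _, c := range pod.Status.Conditions {
            if c.Type == corev1.PodReady {
                return c.Status == corev1.ConditionTrue
            }
        }
        return false
    }

    func main() {
        // Illustrative kubeconfig path; minikube constructs its client internally.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/.kube/config")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)
        for {
            pod, err := client.CoreV1().Pods("kube-system").Get(context.TODO(), "kube-proxy-vf648", metav1.GetOptions{})
            if err == nil && isPodReady(pod) {
                fmt.Println("kube-proxy-vf648 is Ready")
                return
            }
            time.Sleep(500 * time.Millisecond) // matches the ~500ms cadence in the log
        }
    }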
	I0916 11:13:44.027983 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:44.028008 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:44.028018 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:44.028022 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:44.030462 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:44.030484 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:44.030491 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:44.030494 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:44.030497 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:44.030500 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:44.030503 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:44 GMT
	I0916 11:13:44.030505 1501462 round_trippers.go:580]     Audit-Id: 55d472b4-b005-4c90-8267-48cb9657f56d
	I0916 11:13:44.030663 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:44.031196 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:44.031206 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:44.031214 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:44.031220 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:44.033555 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:44.033576 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:44.033585 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:44.033589 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:44.033593 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:44 GMT
	I0916 11:13:44.033596 1501462 round_trippers.go:580]     Audit-Id: 38f83b11-8a54-4161-9172-e60d9a8bc866
	I0916 11:13:44.033598 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:44.033601 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:44.033695 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:44.528812 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:44.528837 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:44.528849 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:44.528855 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:44.531075 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:44.531098 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:44.531106 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:44.531111 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:44.531116 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:44 GMT
	I0916 11:13:44.531119 1501462 round_trippers.go:580]     Audit-Id: 5216bcf2-f5c0-453c-803d-8411f4e737e9
	I0916 11:13:44.531122 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:44.531125 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:44.531491 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:44.532035 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:44.532046 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:44.532054 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:44.532059 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:44.534213 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:44.534236 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:44.534244 1501462 round_trippers.go:580]     Audit-Id: d170cc20-bac6-48de-a69f-fbc3624671e3
	I0916 11:13:44.534248 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:44.534251 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:44.534254 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:44.534257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:44.534260 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:44 GMT
	I0916 11:13:44.534385 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:45.028707 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:45.028734 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:45.028745 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:45.028749 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:45.032441 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:45.032467 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:45.032476 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:45 GMT
	I0916 11:13:45.032479 1501462 round_trippers.go:580]     Audit-Id: d815443f-c467-47f1-87e4-d8c19e865e57
	I0916 11:13:45.032482 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:45.032485 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:45.032488 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:45.032491 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:45.034590 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:45.035191 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:45.035204 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:45.035215 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:45.035220 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:45.046185 1501462 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 11:13:45.046235 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:45.046245 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:45.046249 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:45.046252 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:45.046256 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:45.046259 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:45 GMT
	I0916 11:13:45.046263 1501462 round_trippers.go:580]     Audit-Id: d2f948ef-3bf0-449f-a6b5-43d78c446cc1
	I0916 11:13:45.046365 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:45.528523 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:45.528552 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:45.528562 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:45.528570 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:45.531234 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:45.531263 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:45.531270 1501462 round_trippers.go:580]     Audit-Id: 1b4831de-c614-4459-90fd-dbc48f203300
	I0916 11:13:45.531274 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:45.531276 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:45.531279 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:45.531314 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:45.531321 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:45 GMT
	I0916 11:13:45.531466 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:45.532090 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:45.532113 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:45.532123 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:45.532130 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:45.534459 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:45.534529 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:45.534559 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:45.534565 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:45.534569 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:45.534572 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:45 GMT
	I0916 11:13:45.534576 1501462 round_trippers.go:580]     Audit-Id: c848d2c7-8140-4e35-bd44-0d3848b7a1de
	I0916 11:13:45.534581 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:45.534731 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:46.029001 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:46.029031 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:46.029041 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:46.029045 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:46.032175 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:46.032212 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:46.032223 1501462 round_trippers.go:580]     Audit-Id: 0cdf6608-b805-448c-ab60-e01990d5dcae
	I0916 11:13:46.032227 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:46.032231 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:46.032235 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:46.032239 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:46.032242 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:46 GMT
	I0916 11:13:46.032455 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:46.033056 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:46.033077 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:46.033085 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:46.033090 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:46.035478 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:46.035538 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:46.035547 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:46.035553 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:46 GMT
	I0916 11:13:46.035557 1501462 round_trippers.go:580]     Audit-Id: 094a2c83-2f49-4ff2-99ff-0b7efec5d29a
	I0916 11:13:46.035559 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:46.035562 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:46.035577 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:46.035677 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:46.036092 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:46.528169 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:46.528196 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:46.528207 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:46.528213 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:46.530576 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:46.530599 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:46.530607 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:46.530610 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:46.530614 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:46.530618 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:46 GMT
	I0916 11:13:46.530621 1501462 round_trippers.go:580]     Audit-Id: 8368fa0c-8d99-4ea8-9b55-6f3accf40eb0
	I0916 11:13:46.530623 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:46.531165 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:46.531723 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:46.531738 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:46.531748 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:46.531752 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:46.534046 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:46.534075 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:46.534086 1501462 round_trippers.go:580]     Audit-Id: ab2429c4-f14e-41be-ad60-b755643992ed
	I0916 11:13:46.534090 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:46.534093 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:46.534095 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:46.534098 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:46.534102 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:46 GMT
	I0916 11:13:46.534311 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:47.028650 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:47.028703 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:47.028716 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:47.028722 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:47.031291 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:47.031314 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:47.031323 1501462 round_trippers.go:580]     Audit-Id: 9df6daf7-1765-47ec-9d0a-442c83ac198d
	I0916 11:13:47.031328 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:47.031333 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:47.031342 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:47.031346 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:47.031350 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:47 GMT
	I0916 11:13:47.031483 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:47.032070 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:47.032083 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:47.032092 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:47.032099 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:47.034691 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:47.034715 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:47.034724 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:47.034729 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:47 GMT
	I0916 11:13:47.034733 1501462 round_trippers.go:580]     Audit-Id: 299e9412-c3c8-4b18-8aa1-f993a274c9fe
	I0916 11:13:47.034737 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:47.034740 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:47.034744 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:47.034982 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:47.528757 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:47.528781 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:47.528791 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:47.528800 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:47.531565 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:47.531640 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:47.531663 1501462 round_trippers.go:580]     Audit-Id: 618dd8d8-7f21-4dd8-8de3-383fc3631e57
	I0916 11:13:47.531680 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:47.531713 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:47.531771 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:47.531792 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:47.531796 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:47 GMT
	I0916 11:13:47.531959 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:47.532506 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:47.532522 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:47.532530 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:47.532536 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:47.534907 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:47.534930 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:47.534939 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:47 GMT
	I0916 11:13:47.534945 1501462 round_trippers.go:580]     Audit-Id: 2143aedd-8de5-4788-8bdb-6335239c3dca
	I0916 11:13:47.534950 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:47.534953 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:47.534956 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:47.534961 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:47.535094 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:48.028427 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:48.028453 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:48.028462 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:48.028469 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:48.031214 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:48.031250 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:48.031261 1501462 round_trippers.go:580]     Audit-Id: 759734dd-f96e-4b99-b184-bd78892e08e5
	I0916 11:13:48.031266 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:48.031271 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:48.031275 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:48.031279 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:48.031283 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:48 GMT
	I0916 11:13:48.031478 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:48.032064 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:48.032089 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:48.032098 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:48.032109 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:48.034558 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:48.034601 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:48.034609 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:48.034613 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:48.034616 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:48.034619 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:48 GMT
	I0916 11:13:48.034623 1501462 round_trippers.go:580]     Audit-Id: 34de3e1d-02e3-4f66-9aa0-7f9599ca30a7
	I0916 11:13:48.034626 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:48.034746 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:48.528472 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:48.528498 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:48.528507 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:48.528513 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:48.531023 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:48.531044 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:48.531052 1501462 round_trippers.go:580]     Audit-Id: 32b68bee-f9b8-42ce-bbc2-4afe5164cdc2
	I0916 11:13:48.531057 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:48.531060 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:48.531064 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:48.531067 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:48.531069 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:48 GMT
	I0916 11:13:48.531306 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:48.531884 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:48.531904 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:48.531913 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:48.531919 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:48.534327 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:48.534349 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:48.534357 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:48.534360 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:48.534363 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:48.534366 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:48 GMT
	I0916 11:13:48.534369 1501462 round_trippers.go:580]     Audit-Id: 01e3453b-c46d-47fc-acd9-bbc05b5be99d
	I0916 11:13:48.534372 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:48.534461 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:48.534849 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
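For completeness, the same wait can be expressed with apimachinery's standard polling helper instead of a hand-rolled loop; a sketch under the same assumptions (illustrative package and function names, not minikube's code):

    // Package readiness: the Ready-condition wait via wait.PollUntilContextTimeout.
    package readiness

    import (
        "context"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
    )

    // WaitForPodReady polls every 500ms until the pod reports Ready or the
    // timeout elapses; transient GET errors simply trigger another poll.
    func WaitForPodReady(ctx context.Context, c kubernetes.Interface, ns, name string, timeout time.Duration) error {
        return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
            func(ctx context.Context) (bool, error) {
                pod, err := c.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
                if err != nil {
                    return false, nil
                }
                for _, cond := range pod.Status.Conditions {
                    if cond.Type == corev1.PodReady {
                        return cond.Status == corev1.ConditionTrue, nil
                    }
                }
                return false, nil
            })
    }

The verbose GET/header/body lines in this log come from client-go's debug round tripper at high log verbosity; running kubectl with -v=8 produces similarly styled output, with response bodies truncated as seen here.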
	I0916 11:13:49.028003 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:49.028032 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:49.028041 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:49.028045 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:49.030629 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:49.030651 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:49.030662 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:49 GMT
	I0916 11:13:49.030666 1501462 round_trippers.go:580]     Audit-Id: 97e1124d-0139-4b3e-994e-d18f668ab65f
	I0916 11:13:49.030669 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:49.030674 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:49.030677 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:49.030680 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:49.030854 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:49.031423 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:49.031445 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:49.031455 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:49.031459 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:49.033845 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:49.033866 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:49.033875 1501462 round_trippers.go:580]     Audit-Id: 2241c6b7-d3ad-4880-bdef-e946b7507327
	I0916 11:13:49.033896 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:49.033900 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:49.033904 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:49.033907 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:49.033910 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:49 GMT
	I0916 11:13:49.034050 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:49.528796 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:49.528825 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:49.528835 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:49.528839 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:49.531184 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:49.531295 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:49.531330 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:49.531336 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:49.531346 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:49.531352 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:49 GMT
	I0916 11:13:49.531355 1501462 round_trippers.go:580]     Audit-Id: d6715c7c-07af-4782-b619-15a8603c15e4
	I0916 11:13:49.531358 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:49.531499 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:49.532043 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:49.532060 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:49.532069 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:49.532074 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:49.534117 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:49.534141 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:49.534149 1501462 round_trippers.go:580]     Audit-Id: 0faa4b78-a772-4220-99ae-945ec73db0a6
	I0916 11:13:49.534155 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:49.534159 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:49.534162 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:49.534165 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:49.534168 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:49 GMT
	I0916 11:13:49.534266 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:50.028052 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:50.028081 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:50.028092 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:50.028119 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:50.030875 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:50.030900 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:50.030909 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:50.030915 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:50 GMT
	I0916 11:13:50.030918 1501462 round_trippers.go:580]     Audit-Id: 8c867e95-4169-402f-b7b8-6d07d00d4960
	I0916 11:13:50.030921 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:50.030924 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:50.030926 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:50.031413 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:50.032029 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:50.032052 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:50.032072 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:50.032078 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:50.034733 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:50.034765 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:50.034774 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:50.034778 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:50.034782 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:50.034784 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:50 GMT
	I0916 11:13:50.034787 1501462 round_trippers.go:580]     Audit-Id: 6f2c81ed-3139-40ee-a9c2-1317d438cdef
	I0916 11:13:50.034790 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:50.034909 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:50.527976 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:50.528005 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:50.528015 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:50.528050 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:50.530342 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:50.530368 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:50.530375 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:50.530381 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:50.530386 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:50.530392 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:50 GMT
	I0916 11:13:50.530397 1501462 round_trippers.go:580]     Audit-Id: f24ed375-7c51-451e-ae0f-49d85082e93f
	I0916 11:13:50.530401 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:50.530534 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:50.531055 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:50.531077 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:50.531085 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:50.531089 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:50.533000 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:13:50.533031 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:50.533040 1501462 round_trippers.go:580]     Audit-Id: f22b5d19-ef4c-457a-95b8-d1a597d8648c
	I0916 11:13:50.533046 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:50.533050 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:50.533052 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:50.533055 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:50.533059 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:50 GMT
	I0916 11:13:50.533322 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:51.028563 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:51.028591 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:51.028601 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:51.028606 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:51.031113 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:51.031140 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:51.031149 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:51.031154 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:51.031157 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:51.031160 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:51 GMT
	I0916 11:13:51.031162 1501462 round_trippers.go:580]     Audit-Id: 95e06430-19b3-45bc-8ad2-f47f6ad5579d
	I0916 11:13:51.031165 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:51.031334 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:51.031930 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:51.031950 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:51.031960 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:51.031967 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:51.034156 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:51.034197 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:51.034206 1501462 round_trippers.go:580]     Audit-Id: 0993a470-f9c5-4beb-807c-89b7c1a42554
	I0916 11:13:51.034211 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:51.034214 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:51.034218 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:51.034223 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:51.034226 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:51 GMT
	I0916 11:13:51.034335 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:51.034774 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
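Each ~500ms iteration above reduces to: GET the pod, read its Ready condition, and retry while it is still False. A minimal client-go sketch of that loop, with the kubeconfig path as a placeholder (minikube's real waiter in pod_ready.go builds its client from the profile's kubeconfig and, as the paired node GETs show, also consults the hosting node):

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True.
func podReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	// Placeholder path; substitute the profile's kubeconfig.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Poll at the ~500ms cadence visible in the timestamps above.
	for {
		pod, err := cs.CoreV1().Pods("kube-system").Get(context.Background(), "kube-proxy-vf648", metav1.GetOptions{})
		if err != nil {
			log.Fatal(err)
		}
		if podReady(pod) {
			fmt.Println("pod is Ready")
			return
		}
		time.Sleep(500 * time.Millisecond)
	}
}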
	[... five further poll iterations (11:13:51.528 through 11:13:53.534) omitted: the same GET requests for pod kube-proxy-vf648 and node multinode-654612-m03 repeated at ~500ms intervals, each returning 200 OK with unchanged bodies (pod resourceVersion 642, node resourceVersion 616) ...]
	I0916 11:13:53.534574 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:13:54.027985 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:54.028013 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.028023 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.028027 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.030704 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.030742 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.030752 1501462 round_trippers.go:580]     Audit-Id: d38da8a1-4e1b-4e7e-8edf-180fae656c77
	I0916 11:13:54.030757 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.030772 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.030776 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.030780 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.030784 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.031256 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:54.031903 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:54.031932 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.031942 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.031952 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.034442 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.034471 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.034480 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.034484 1501462 round_trippers.go:580]     Audit-Id: a9c36029-439c-45ca-9531-c716b01a7783
	I0916 11:13:54.034487 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.034490 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.034492 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.034495 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.034622 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"616","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 5817 chars]
	I0916 11:13:54.528883 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:13:54.528929 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.528939 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.528944 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.531338 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.531366 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.531375 1501462 round_trippers.go:580]     Audit-Id: ebff567d-75a3-414b-b23d-18a2751d4ee8
	I0916 11:13:54.531379 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.531384 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.531388 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.531392 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.531397 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.531930 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:13:54.532463 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:13:54.532481 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.532490 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.532498 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.534563 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.534586 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.534594 1501462 round_trippers.go:580]     Audit-Id: 7cb39bd1-9e35-49d2-9831-054d0a35c3a7
	I0916 11:13:54.534600 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.534604 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.534607 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.534611 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.534615 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.534911 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"788","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6476 chars]
	I0916 11:13:54.535332 1501462 pod_ready.go:98] node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:13:54.535356 1501462 pod_ready.go:82] duration metric: took 30.007591979s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	E0916 11:13:54.535370 1501462 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-654612-m03" has status "Ready":"Unknown"
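The wait ends not because the pod turned Ready but because the hosting node's own Ready condition became Unknown, visible in the final node response where the resourceVersion advanced from 616 to 788. A pod on a not-Ready node can never report Ready, so the waiter skips it instead of burning the rest of the 6m timeout. A sketch of that gate, under the same client-go assumptions (placeholder kubeconfig path) as above:

package main

import (
	"context"
	"fmt"
	"log"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// nodeReadyStatus returns the node's Ready condition status
// (True, False, or Unknown), or "" if the condition is absent.
func nodeReadyStatus(node *corev1.Node) corev1.ConditionStatus {
	for _, c := range node.Status.Conditions {
		if c.Type == corev1.NodeReady {
			return c.Status
		}
	}
	return ""
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	node, err := cs.CoreV1().Nodes().Get(context.Background(), "multinode-654612-m03", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	// Anything other than True (here: Unknown, since the node was stopped)
	// means pods hosted on it will never become Ready, so give up on them.
	if s := nodeReadyStatus(node); s != corev1.ConditionTrue {
		fmt.Printf("node not Ready (status %q), skipping pods on it\n", s)
	}
}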
	I0916 11:13:54.535381 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:54.535445 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:13:54.535456 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.535474 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.535483 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.537553 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.537578 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.537586 1501462 round_trippers.go:580]     Audit-Id: 19ed4135-95a9-4876-8a34-85b42752c260
	I0916 11:13:54.537591 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.537594 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.537597 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.537600 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.537603 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.537717 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"755","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5101 chars]
	I0916 11:13:54.538222 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:13:54.538257 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.538271 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.538276 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.540400 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.540418 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.540426 1501462 round_trippers.go:580]     Audit-Id: 67373f1a-75d5-4c7e-97e2-45afac5d8745
	I0916 11:13:54.540430 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.540433 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.540436 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.540438 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.540441 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.540546 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:13:54.540969 1501462 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:13:54.540988 1501462 pod_ready.go:82] duration metric: took 5.598828ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:13:54.541000 1501462 pod_ready.go:39] duration metric: took 43.269529606s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:13:54.541018 1501462 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:13:54.541089 1501462 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:13:54.552306 1501462 command_runner.go:130] > 958
	I0916 11:13:54.553583 1501462 api_server.go:72] duration metric: took 47.852729375s to wait for apiserver process to appear ...
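The process check is literally "sudo pgrep -xnf kube-apiserver.*minikube.*" run on the node through minikube's ssh_runner; a PID on stdout (958 above) means the apiserver binary is up. A local approximation with os/exec follows; the SSH hop is omitted, and the retry loop and two-minute deadline are illustrative assumptions rather than minikube's actual timeouts:

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
	"time"
)

func main() {
	// Same pattern as the log: -x matches the pattern against the whole
	// string, -n picks the newest match, -f matches the full command line.
	pattern := "kube-apiserver.*minikube.*"
	deadline := time.Now().Add(2 * time.Minute)
	for time.Now().Before(deadline) {
		out, err := exec.Command("sudo", "pgrep", "-xnf", pattern).Output()
		if err == nil {
			fmt.Println("apiserver pid:", strings.TrimSpace(string(out)))
			return
		}
		time.Sleep(time.Second) // pgrep exits non-zero while nothing matches
	}
	log.Fatal("timed out waiting for kube-apiserver process")
}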
	I0916 11:13:54.553604 1501462 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:13:54.553627 1501462 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0916 11:13:54.561137 1501462 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
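With the process up, readiness is confirmed against the apiserver's /healthz endpoint, which answers 200 with the literal body "ok" once all health checks pass, exactly as logged above. A minimal probe; skipping TLS verification is an illustration-only shortcut, whereas minikube validates against the cluster CA:

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// InsecureSkipVerify only to keep the sketch self-contained; real code
	// should trust the cluster's CA certificate instead.
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}}
	resp, err := client.Get("https://192.168.67.2:8443/healthz")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	// A healthy apiserver returns 200 with the body "ok".
	fmt.Printf("%s returned %d: %s\n", resp.Request.URL, resp.StatusCode, body)
}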
	I0916 11:13:54.561219 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/version
	I0916 11:13:54.561231 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.561240 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.561244 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.562172 1501462 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:13:54.562189 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.562198 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.562203 1501462 round_trippers.go:580]     Content-Length: 263
	I0916 11:13:54.562208 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.562222 1501462 round_trippers.go:580]     Audit-Id: f728b55e-a93d-4ff4-a54d-370fd4cea4c7
	I0916 11:13:54.562229 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.562232 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.562238 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.562254 1501462 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:13:54.562361 1501462 api_server.go:141] control plane version: v1.31.1
	I0916 11:13:54.562409 1501462 api_server.go:131] duration metric: took 8.799932ms to wait for apiserver health ...
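The two requests above are minikube's health gate: a raw GET /healthz that must return "ok", followed by GET /version to record the control-plane version. A hedged sketch of the same pair of checks through client-go's discovery client (the kubeconfig path is again a placeholder):

	// version_sketch.go - illustrative; corresponds to the /healthz and /version calls above.
	package main

	import (
		"context"
		"fmt"

		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // placeholder
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		// Raw healthz probe, like the api_server.go check in the log.
		body, err := cs.Discovery().RESTClient().Get().AbsPath("/healthz").DoRaw(context.TODO())
		if err != nil {
			panic(err)
		}
		fmt.Printf("healthz: %s\n", body) // "ok" on a healthy apiserver
		// Version check, equivalent to GET /version.
		v, err := cs.Discovery().ServerVersion()
		if err != nil {
			panic(err)
		}
		fmt.Println("control plane version:", v.GitVersion) // e.g. v1.31.1
	}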
	I0916 11:13:54.562418 1501462 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:13:54.562491 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:13:54.562501 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.562509 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.562513 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.566290 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:13:54.566350 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.566374 1501462 round_trippers.go:580]     Audit-Id: bac1e954-2b6c-432b-b099-4048b9cee56e
	I0916 11:13:54.566389 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.566406 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.566433 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.566454 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.566472 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.567245 1501462 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"798"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91618 chars]
	I0916 11:13:54.571153 1501462 system_pods.go:59] 12 kube-system pods found
	I0916 11:13:54.571194 1501462 system_pods.go:61] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:13:54.571200 1501462 system_pods.go:61] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:13:54.571205 1501462 system_pods.go:61] "kindnet-687xg" [021cf850-fa0b-463e-968b-f257f7952a05] Running
	I0916 11:13:54.571214 1501462 system_pods.go:61] "kindnet-ncfhl" [2e9059ba-ed83-45b3-810c-02dda1910d4a] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 11:13:54.571224 1501462 system_pods.go:61] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:13:54.571229 1501462 system_pods.go:61] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:13:54.571237 1501462 system_pods.go:61] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:13:54.571241 1501462 system_pods.go:61] "kube-proxy-gf2tw" [814e8a89-b190-4aef-a303-44981c9e19c9] Running
	I0916 11:13:54.571245 1501462 system_pods.go:61] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:13:54.571253 1501462 system_pods.go:61] "kube-proxy-vf648" [376afe3e-390b-443b-b289-7dfeeb1deed1] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 11:13:54.571261 1501462 system_pods.go:61] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:13:54.571266 1501462 system_pods.go:61] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:13:54.571272 1501462 system_pods.go:74] duration metric: took 8.846323ms to wait for pod list to return data ...
	I0916 11:13:54.571283 1501462 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:13:54.571367 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:13:54.571378 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.571386 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.571389 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.574003 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.574039 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.574047 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.574051 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.574054 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.574057 1501462 round_trippers.go:580]     Content-Length: 261
	I0916 11:13:54.574060 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.574063 1501462 round_trippers.go:580]     Audit-Id: 7e64c9f9-fb8e-4e0c-8f8f-0a1e1fb24b05
	I0916 11:13:54.574066 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.574088 1501462 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"798"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"8b0a4fd5-1ca6-4da1-beae-b1e2017b49fd","resourceVersion":"297","creationTimestamp":"2024-09-16T11:10:14Z"}}]}
	I0916 11:13:54.574293 1501462 default_sa.go:45] found service account: "default"
	I0916 11:13:54.574311 1501462 default_sa.go:55] duration metric: took 3.022703ms for default service account to be created ...
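The default_sa step simply queries service accounts in the default namespace until one named "default" exists. A sketch of that poll, assuming a recent k8s.io/apimachinery that provides wait.PollUntilContextTimeout:

	// defaultsa_sketch.go - illustrative poll for the "default" service account.
	package main

	import (
		"context"
		"fmt"
		"time"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // placeholder
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		err = wait.PollUntilContextTimeout(context.Background(), time.Second, 2*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				_, err := cs.CoreV1().ServiceAccounts("default").Get(ctx, "default", metav1.GetOptions{})
				if err != nil {
					return false, nil // not created yet; keep polling
				}
				return true, nil
			})
		if err != nil {
			panic(err)
		}
		fmt.Println("found service account: default")
	}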
	I0916 11:13:54.574319 1501462 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:13:54.574379 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:13:54.574390 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.574397 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.574402 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.577371 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.577394 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.577402 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.577408 1501462 round_trippers.go:580]     Audit-Id: 4bb8cf97-fda6-4ee7-ba70-5b9d35f617df
	I0916 11:13:54.577411 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.577414 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.577418 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.577422 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.578028 1501462 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"798"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91618 chars]
	I0916 11:13:54.581870 1501462 system_pods.go:86] 12 kube-system pods found
	I0916 11:13:54.581903 1501462 system_pods.go:89] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running
	I0916 11:13:54.581910 1501462 system_pods.go:89] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:13:54.581915 1501462 system_pods.go:89] "kindnet-687xg" [021cf850-fa0b-463e-968b-f257f7952a05] Running
	I0916 11:13:54.581923 1501462 system_pods.go:89] "kindnet-ncfhl" [2e9059ba-ed83-45b3-810c-02dda1910d4a] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 11:13:54.581930 1501462 system_pods.go:89] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:13:54.581936 1501462 system_pods.go:89] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:13:54.581945 1501462 system_pods.go:89] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:13:54.581949 1501462 system_pods.go:89] "kube-proxy-gf2tw" [814e8a89-b190-4aef-a303-44981c9e19c9] Running
	I0916 11:13:54.581953 1501462 system_pods.go:89] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:13:54.581962 1501462 system_pods.go:89] "kube-proxy-vf648" [376afe3e-390b-443b-b289-7dfeeb1deed1] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 11:13:54.581969 1501462 system_pods.go:89] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:13:54.581975 1501462 system_pods.go:89] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:13:54.581982 1501462 system_pods.go:126] duration metric: took 7.656591ms to wait for k8s-apps to be running ...
	I0916 11:13:54.581994 1501462 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:13:54.582051 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:13:54.593461 1501462 system_svc.go:56] duration metric: took 11.457113ms WaitForService to wait for kubelet
	I0916 11:13:54.593538 1501462 kubeadm.go:582] duration metric: took 47.892687796s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:13:54.593566 1501462 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:13:54.593642 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:13:54.593653 1501462 round_trippers.go:469] Request Headers:
	I0916 11:13:54.593662 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:13:54.593668 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:13:54.596499 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:13:54.596525 1501462 round_trippers.go:577] Response Headers:
	I0916 11:13:54.596534 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:13:54 GMT
	I0916 11:13:54.596538 1501462 round_trippers.go:580]     Audit-Id: c6f6c094-7604-4728-88e8-ffe300cec5aa
	I0916 11:13:54.596543 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:13:54.596546 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:13:54.596548 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:13:54.596559 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:13:54.596896 1501462 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"798"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 21620 chars]
	I0916 11:13:54.597959 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:13:54.597989 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:13:54.598001 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:13:54.598006 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:13:54.598016 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:13:54.598021 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:13:54.598026 1501462 node_conditions.go:105] duration metric: took 4.454323ms to run NodePressure ...
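The NodePressure check reads each node's .status.capacity; as logged above, every node in this run reports 203034800Ki of ephemeral storage and 2 CPUs. A minimal sketch of reading those two capacity fields:

	// nodecap_sketch.go - illustrative; reads the capacities logged above.
	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // placeholder
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
			cpu := n.Status.Capacity[corev1.ResourceCPU]
			// e.g. "203034800Ki" and "2" for the nodes in this run
			fmt.Printf("%s: ephemeral=%s cpu=%s\n", n.Name, storage.String(), cpu.String())
		}
	}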
	I0916 11:13:54.598044 1501462 start.go:241] waiting for startup goroutines ...
	I0916 11:13:54.598052 1501462 start.go:246] waiting for cluster config update ...
	I0916 11:13:54.598063 1501462 start.go:255] writing updated cluster config ...
	I0916 11:13:54.601485 1501462 out.go:201] 
	I0916 11:13:54.604893 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:13:54.605014 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:13:54.607922 1501462 out.go:177] * Starting "multinode-654612-m02" worker node in "multinode-654612" cluster
	I0916 11:13:54.610573 1501462 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:13:54.613443 1501462 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:13:54.615992 1501462 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:13:54.616018 1501462 cache.go:56] Caching tarball of preloaded images
	I0916 11:13:54.616074 1501462 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:13:54.616121 1501462 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:13:54.616132 1501462 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:13:54.616270 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:13:54.639985 1501462 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:13:54.640008 1501462 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:13:54.640082 1501462 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:13:54.640106 1501462 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:13:54.640115 1501462 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:13:54.640123 1501462 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:13:54.640132 1501462 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:13:54.641352 1501462 image.go:273] response: 
	I0916 11:13:54.758878 1501462 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:13:54.758919 1501462 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:13:54.758950 1501462 start.go:360] acquireMachinesLock for multinode-654612-m02: {Name:mk70904bbc860a548c4a9726b7d64e227f1f9cac Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:54.759018 1501462 start.go:364] duration metric: took 45.283µs to acquireMachinesLock for "multinode-654612-m02"
	I0916 11:13:54.759049 1501462 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:13:54.759058 1501462 fix.go:54] fixHost starting: m02
	I0916 11:13:54.759333 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:13:54.774793 1501462 fix.go:112] recreateIfNeeded on multinode-654612-m02: state=Stopped err=<nil>
	W0916 11:13:54.774829 1501462 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:13:54.778002 1501462 out.go:177] * Restarting existing docker container for "multinode-654612-m02" ...
	I0916 11:13:54.780624 1501462 cli_runner.go:164] Run: docker start multinode-654612-m02
	I0916 11:13:55.108289 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:13:55.134002 1501462 kic.go:430] container "multinode-654612-m02" state is running.
	I0916 11:13:55.134377 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:13:55.160633 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:13:55.160907 1501462 machine.go:93] provisionDockerMachine start ...
	I0916 11:13:55.160975 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:55.187770 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:55.188052 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34763 <nil> <nil>}
	I0916 11:13:55.188064 1501462 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:13:55.188849 1501462 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:13:58.329021 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:13:58.329046 1501462 ubuntu.go:169] provisioning hostname "multinode-654612-m02"
	I0916 11:13:58.329114 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:58.348453 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:58.348738 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34763 <nil> <nil>}
	I0916 11:13:58.348757 1501462 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612-m02 && echo "multinode-654612-m02" | sudo tee /etc/hostname
	I0916 11:13:58.507501 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:13:58.507590 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:58.525666 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:58.525925 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34763 <nil> <nil>}
	I0916 11:13:58.525948 1501462 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:13:58.665332 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
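The SSH script that just ran patches /etc/hosts idempotently: if no line already ends with the hostname, it rewrites an existing 127.0.1.1 entry, otherwise appends one. A local Go sketch of the same decision logic (file path and hostname hard-coded for illustration):

	// hostspatch_sketch.go - illustrative; mirrors the grep/sed logic in the SSH command above.
	package main

	import (
		"fmt"
		"os"
		"regexp"
	)

	func patchHosts(contents, hostname string) string {
		// Equivalent of: grep -xq '.*\s<hostname>' /etc/hosts
		if regexp.MustCompile(`(?m)^.*\s` + regexp.QuoteMeta(hostname) + `$`).MatchString(contents) {
			return contents // hostname already present; nothing to do
		}
		loopback := regexp.MustCompile(`(?m)^127\.0\.1\.1\s.*$`)
		if loopback.MatchString(contents) {
			return loopback.ReplaceAllString(contents, "127.0.1.1 "+hostname)
		}
		return contents + "127.0.1.1 " + hostname + "\n"
	}

	func main() {
		data, err := os.ReadFile("/etc/hosts")
		if err != nil {
			panic(err)
		}
		fmt.Print(patchHosts(string(data), "multinode-654612-m02"))
	}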
	I0916 11:13:58.665364 1501462 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:13:58.665388 1501462 ubuntu.go:177] setting up certificates
	I0916 11:13:58.665398 1501462 provision.go:84] configureAuth start
	I0916 11:13:58.665467 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:13:58.685695 1501462 provision.go:143] copyHostCerts
	I0916 11:13:58.685747 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:13:58.685784 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:13:58.685797 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:13:58.685901 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:13:58.686016 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:13:58.686043 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:13:58.686049 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:13:58.686093 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:13:58.686153 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:13:58.686180 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:13:58.686188 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:13:58.686226 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:13:58.686294 1501462 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612-m02 san=[127.0.0.1 192.168.67.3 localhost minikube multinode-654612-m02]
	I0916 11:13:59.351435 1501462 provision.go:177] copyRemoteCerts
	I0916 11:13:59.351563 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:13:59.351646 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:59.368610 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34763 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:13:59.468149 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:13:59.468229 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:13:59.495265 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:13:59.495330 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:13:59.521007 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:13:59.521101 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:13:59.548024 1501462 provision.go:87] duration metric: took 882.606631ms to configureAuth
	I0916 11:13:59.548054 1501462 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:13:59.548328 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:13:59.548449 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:59.572892 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:59.573134 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34763 <nil> <nil>}
	I0916 11:13:59.573149 1501462 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:13:59.849659 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:13:59.849687 1501462 machine.go:96] duration metric: took 4.688768714s to provisionDockerMachine
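Provisioning ends by writing CRIO_MINIKUBE_OPTIONS into /etc/sysconfig/crio.minikube and restarting CRI-O over SSH. A hedged local equivalent using os/exec; the script text is copied from the logged command, and running it for real needs root:

	// criosysconfig_sketch.go - illustrative; the test runs this remotely via ssh_runner.
	package main

	import (
		"fmt"
		"os/exec"
	)

	func main() {
		script := `sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio`
		out, err := exec.Command("/bin/sh", "-c", script).CombinedOutput()
		if err != nil {
			panic(fmt.Sprintf("%v: %s", err, out))
		}
		fmt.Printf("%s", out) // echoes the file contents written by tee
	}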
	I0916 11:13:59.849699 1501462 start.go:293] postStartSetup for "multinode-654612-m02" (driver="docker")
	I0916 11:13:59.849709 1501462 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:13:59.849779 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:13:59.849837 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:13:59.867303 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34763 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:13:59.969704 1501462 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:13:59.972960 1501462 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:13:59.972980 1501462 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:13:59.972987 1501462 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:13:59.973018 1501462 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:13:59.973023 1501462 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:13:59.973030 1501462 command_runner.go:130] > ID=ubuntu
	I0916 11:13:59.973035 1501462 command_runner.go:130] > ID_LIKE=debian
	I0916 11:13:59.973040 1501462 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:13:59.973044 1501462 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:13:59.973052 1501462 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:13:59.973059 1501462 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:13:59.973067 1501462 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:13:59.973125 1501462 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:13:59.973154 1501462 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:13:59.973164 1501462 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:13:59.973171 1501462 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:13:59.973184 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:13:59.973254 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:13:59.973336 1501462 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:13:59.973347 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:13:59.973475 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:13:59.984245 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:14:00.016544 1501462 start.go:296] duration metric: took 166.826535ms for postStartSetup
	I0916 11:14:00.016650 1501462 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:14:00.016723 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:14:00.063797 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34763 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:14:00.223080 1501462 command_runner.go:130] > 13%
	I0916 11:14:00.223176 1501462 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:14:00.230921 1501462 command_runner.go:130] > 170G
	I0916 11:14:00.230954 1501462 fix.go:56] duration metric: took 5.471893292s for fixHost
	I0916 11:14:00.230967 1501462 start.go:83] releasing machines lock for "multinode-654612-m02", held for 5.471936318s
	I0916 11:14:00.231058 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:14:00.312141 1501462 out.go:177] * Found network options:
	I0916 11:14:00.315259 1501462 out.go:177]   - NO_PROXY=192.168.67.2
	W0916 11:14:00.358584 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:14:00.358642 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:14:00.358734 1501462 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:14:00.358798 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:14:00.359094 1501462 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:14:00.359150 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:14:00.418973 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34763 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:14:00.434903 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34763 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:14:00.679162 1501462 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:14:00.679240 1501462 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:14:00.684263 1501462 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 11:14:00.684293 1501462 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:14:00.684310 1501462 command_runner.go:130] > Device: c4h/196d	Inode: 1570512     Links: 1
	I0916 11:14:00.684322 1501462 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:14:00.684329 1501462 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:14:00.684334 1501462 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:14:00.684339 1501462 command_runner.go:130] > Change: 2024-09-16 11:11:09.805260925 +0000
	I0916 11:14:00.684344 1501462 command_runner.go:130] >  Birth: 2024-09-16 11:11:09.801261023 +0000
	I0916 11:14:00.685171 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:14:00.695191 1501462 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:14:00.695345 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:14:00.704415 1501462 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:14:00.704441 1501462 start.go:495] detecting cgroup driver to use...
	I0916 11:14:00.704504 1501462 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:14:00.704571 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:14:00.718671 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:14:00.731997 1501462 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:14:00.732072 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:14:00.746360 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:14:00.759804 1501462 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:14:00.860627 1501462 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:14:00.974075 1501462 docker.go:233] disabling docker service ...
	I0916 11:14:00.974150 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:14:00.988237 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:14:01.000067 1501462 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:14:01.098591 1501462 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:14:01.188658 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:14:01.201209 1501462 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:14:01.218823 1501462 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:14:01.220657 1501462 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:14:01.220770 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.234737 1501462 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:14:01.234820 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.246089 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.256775 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.268144 1501462 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:14:01.278113 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.290577 1501462 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.300780 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:01.311473 1501462 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:14:01.319767 1501462 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:14:01.320949 1501462 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:14:01.332891 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:01.419951 1501462 ssh_runner.go:195] Run: sudo systemctl restart crio
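Taken together, the sed edits above leave /etc/crio/crio.conf.d/02-crio.conf with a registry.k8s.io/pause:3.10 pause image, cgroupfs as the cgroup manager, conmon_cgroup reset to "pod", and net.ipv4.ip_unprivileged_port_start=0 injected into default_sysctls. The sketch below writes a drop-in with that end state; the section placement follows CRI-O's documented config layout and is an assumption, not a file captured from this run:

	// crioconf_sketch.go - illustrative end state of the sed edits above.
	package main

	import (
		"fmt"
		"os"
	)

	func main() {
		// Values taken from the logged commands; the real file carries more keys.
		dropIn := `[crio.image]
	pause_image = "registry.k8s.io/pause:3.10"

	[crio.runtime]
	cgroup_manager = "cgroupfs"
	conmon_cgroup = "pod"
	default_sysctls = [
	  "net.ipv4.ip_unprivileged_port_start=0",
	]
	`
		if err := os.WriteFile("02-crio.conf.example", []byte(dropIn), 0o644); err != nil {
			panic(err)
		}
		fmt.Print(dropIn)
	}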
	I0916 11:14:01.542874 1501462 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:14:01.543002 1501462 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:14:01.548052 1501462 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:14:01.548082 1501462 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:14:01.548092 1501462 command_runner.go:130] > Device: cdh/205d	Inode: 190         Links: 1
	I0916 11:14:01.548100 1501462 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:14:01.548105 1501462 command_runner.go:130] > Access: 2024-09-16 11:14:01.525060357 +0000
	I0916 11:14:01.548111 1501462 command_runner.go:130] > Modify: 2024-09-16 11:14:01.525060357 +0000
	I0916 11:14:01.548116 1501462 command_runner.go:130] > Change: 2024-09-16 11:14:01.525060357 +0000
	I0916 11:14:01.548138 1501462 command_runner.go:130] >  Birth: -
	I0916 11:14:01.548386 1501462 start.go:563] Will wait 60s for crictl version
	I0916 11:14:01.548479 1501462 ssh_runner.go:195] Run: which crictl
	I0916 11:14:01.553512 1501462 command_runner.go:130] > /usr/bin/crictl
	I0916 11:14:01.554094 1501462 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:14:01.598673 1501462 command_runner.go:130] > Version:  0.1.0
	I0916 11:14:01.598696 1501462 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:14:01.598701 1501462 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:14:01.598707 1501462 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:14:01.601986 1501462 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:14:01.602111 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:14:01.640418 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:14:01.640444 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:14:01.640453 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:14:01.640459 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:14:01.640466 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:14:01.640491 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:14:01.640503 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:14:01.640509 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:14:01.640524 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:14:01.640532 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:14:01.640538 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:14:01.640545 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:14:01.642829 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:14:01.688381 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:14:01.688407 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:14:01.688416 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:14:01.688421 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:14:01.688427 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:14:01.688432 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:14:01.688437 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:14:01.688482 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:14:01.688488 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:14:01.688495 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:14:01.688499 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:14:01.688503 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:14:01.695691 1501462 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:14:01.698593 1501462 out.go:177]   - env NO_PROXY=192.168.67.2
	I0916 11:14:01.701326 1501462 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:14:01.718143 1501462 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:14:01.722719 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:14:01.735639 1501462 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:14:01.735912 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:01.736215 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:14:01.754038 1501462 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:14:01.754336 1501462 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.3
	I0916 11:14:01.754351 1501462 certs.go:194] generating shared ca certs ...
	I0916 11:14:01.754364 1501462 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:14:01.754476 1501462 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:14:01.754527 1501462 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:14:01.754541 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:14:01.754556 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:14:01.754572 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:14:01.754582 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:14:01.754639 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:14:01.754673 1501462 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:14:01.754686 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:14:01.754713 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:14:01.754738 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:14:01.754766 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:14:01.754825 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:14:01.754859 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:01.754875 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:14:01.754886 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:14:01.754908 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:14:01.783845 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:14:01.809475 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:14:01.837228 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:14:01.863910 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:14:01.898422 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:14:01.923562 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:14:01.949608 1501462 ssh_runner.go:195] Run: openssl version
	I0916 11:14:01.955356 1501462 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:14:01.955799 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:14:01.965960 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:01.970895 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:01.971096 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:01.971154 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:01.979425 1501462 command_runner.go:130] > b5213941
	I0916 11:14:01.979509 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:14:01.989442 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:14:01.999841 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:14:02.006250 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:14:02.006567 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:14:02.006672 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:14:02.015895 1501462 command_runner.go:130] > 51391683
	I0916 11:14:02.016461 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:14:02.026821 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:14:02.037622 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:14:02.041688 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:14:02.041738 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:14:02.041796 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:14:02.049128 1501462 command_runner.go:130] > 3ec20f2e
	I0916 11:14:02.049565 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
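Each CA above is installed by hashing it with openssl x509 -hash -noout and symlinking /etc/ssl/certs/<hash>.0 at it, which is how OpenSSL resolves CAs by subject hash. A Go sketch of that hash-and-symlink step (requires root to write /etc/ssl/certs; the cert path is one of the bundles from the log):

	// certinstall_sketch.go - illustrative; mirrors the hash-and-symlink commands above.
	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	func main() {
		pem := "/usr/share/ca-certificates/minikubeCA.pem"
		out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pem).Output()
		if err != nil {
			panic(err)
		}
		hash := strings.TrimSpace(string(out)) // e.g. "b5213941" in this run
		link := "/etc/ssl/certs/" + hash + ".0"
		// ln -fs replaces any stale symlink, as in the logged command.
		if err := exec.Command("sudo", "ln", "-fs", pem, link).Run(); err != nil {
			panic(err)
		}
		fmt.Println("installed", link, "->", pem)
	}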
	I0916 11:14:02.059266 1501462 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:14:02.062850 1501462 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:14:02.062942 1501462 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:14:02.062982 1501462 kubeadm.go:934] updating node {m02 192.168.67.3 8443 v1.31.1 crio false true} ...
	I0916 11:14:02.063074 1501462 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:14:02.063145 1501462 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:14:02.070964 1501462 command_runner.go:130] > kubeadm
	I0916 11:14:02.070987 1501462 command_runner.go:130] > kubectl
	I0916 11:14:02.070992 1501462 command_runner.go:130] > kubelet
	I0916 11:14:02.072182 1501462 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:14:02.072271 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:14:02.082414 1501462 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (370 bytes)
	I0916 11:14:02.103606 1501462 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:14:02.123696 1501462 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:14:02.128339 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:14:02.139861 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:02.228241 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:14:02.242377 1501462 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:14:02.242752 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:02.245959 1501462 out.go:177] * Verifying Kubernetes components...
	I0916 11:14:02.249369 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:02.353355 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:14:02.366953 1501462 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:14:02.367222 1501462 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
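The kapi.go:59 dump above is an ordinary client-go rest.Config carrying the profile's mutual-TLS material. A sketch that builds an equivalent clientset from those same fields (host and cert paths copied from the log; not minikube's code):

	package main

	import (
		"fmt"

		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/rest"
	)

	func main() {
		cfg := &rest.Config{
			Host: "https://192.168.67.2:8443",
			TLSClientConfig: rest.TLSClientConfig{
				CertFile: "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt",
				KeyFile:  "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key",
				CAFile:   "/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt",
			},
		}
		clientset, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		fmt.Printf("client ready: %T\n", clientset)
	}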
	I0916 11:14:02.367496 1501462 node_ready.go:35] waiting up to 6m0s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:14:02.367591 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:02.367603 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:02.367620 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:02.367624 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:02.369995 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:02.370019 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:02.370028 1501462 round_trippers.go:580]     Audit-Id: 32ea06a5-87b8-4513-8104-41dded19205d
	I0916 11:14:02.370032 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:02.370045 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:02.370049 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:02.370052 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:02.370055 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:02 GMT
	I0916 11:14:02.370354 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
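The round_trippers lines throughout this section (request, headers, status, truncated body) are client-go's debug transport, emitted at high log verbosity. The underlying pattern is a wrapping http.RoundTripper; a stripped-down analogue (illustrative, not client-go's actual code):

	package main

	import (
		"log"
		"net/http"
		"time"
	)

	// logRT logs each request line and the response status with latency,
	// in the spirit of the round_trippers output above.
	type logRT struct{ next http.RoundTripper }

	func (l logRT) RoundTrip(req *http.Request) (*http.Response, error) {
		log.Printf("%s %s", req.Method, req.URL)
		start := time.Now()
		resp, err := l.next.RoundTrip(req)
		if err != nil {
			return nil, err
		}
		log.Printf("Response Status: %s in %d milliseconds", resp.Status, time.Since(start).Milliseconds())
		return resp, nil
	}

	func main() {
		c := &http.Client{Transport: logRT{next: http.DefaultTransport}}
		if _, err := c.Get("https://example.com/"); err != nil {
			log.Fatal(err)
		}
	}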
	I0916 11:14:02.868367 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:02.868390 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:02.868400 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:02.868406 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:02.870721 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:02.870790 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:02.870812 1501462 round_trippers.go:580]     Audit-Id: 77cae021-726b-48a5-8a2f-da61e08121c2
	I0916 11:14:02.870830 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:02.870863 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:02.870888 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:02.870906 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:02.870923 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:02 GMT
	I0916 11:14:02.871099 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:03.368704 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:03.368730 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:03.368740 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:03.368745 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:03.370977 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:03.371044 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:03.371069 1501462 round_trippers.go:580]     Audit-Id: e535f637-055c-4dd3-ae5a-c022aa21d610
	I0916 11:14:03.371080 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:03.371085 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:03.371089 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:03.371092 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:03.371105 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:03 GMT
	I0916 11:14:03.371289 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:03.868587 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:03.868611 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:03.868621 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:03.868626 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:03.871894 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:03.871921 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:03.871930 1501462 round_trippers.go:580]     Audit-Id: 5830f4cb-b458-49f7-b5f0-2aeff6231148
	I0916 11:14:03.871935 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:03.871938 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:03.871941 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:03.871944 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:03.871948 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:03 GMT
	I0916 11:14:03.872059 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:04.368435 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:04.368465 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:04.368474 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:04.368481 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:04.370986 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:04.371019 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:04.371030 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:04.371039 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:04.371042 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:04.371046 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:04.371049 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:04 GMT
	I0916 11:14:04.371052 1501462 round_trippers.go:580]     Audit-Id: 98952023-2e72-404a-8d81-d38e299cc448
	I0916 11:14:04.371253 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:04.371686 1501462 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"Unknown"
	I0916 11:14:04.867760 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:04.867782 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:04.867791 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:04.867798 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:04.870094 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:04.870144 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:04.870154 1501462 round_trippers.go:580]     Audit-Id: cac48e86-4fca-40eb-9e0f-5ccb539e038e
	I0916 11:14:04.870168 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:04.870174 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:04.870177 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:04.870180 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:04.870183 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:04 GMT
	I0916 11:14:04.870334 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:05.367804 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:05.367835 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:05.367844 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:05.367852 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:05.370216 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:05.370243 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:05.370251 1501462 round_trippers.go:580]     Audit-Id: 2505abdd-b5a9-4c84-8df7-06281a02d5dc
	I0916 11:14:05.370254 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:05.370257 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:05.370259 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:05.370262 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:05.370265 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:05 GMT
	I0916 11:14:05.370933 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:05.867760 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:05.867803 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:05.867818 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:05.867825 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:05.870039 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:05.870101 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:05.870122 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:05.870143 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:05.870179 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:05.870198 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:05 GMT
	I0916 11:14:05.870214 1501462 round_trippers.go:580]     Audit-Id: 97cfc484-2c6e-447d-b183-e681672ac44b
	I0916 11:14:05.870232 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:05.870370 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:06.368043 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:06.368070 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:06.368081 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:06.368085 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:06.370258 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:06.370283 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:06.370292 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:06 GMT
	I0916 11:14:06.370297 1501462 round_trippers.go:580]     Audit-Id: e7aae075-dfe7-446f-b5d0-eeea04173414
	I0916 11:14:06.370300 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:06.370303 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:06.370306 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:06.370308 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:06.370547 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:06.868092 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:06.868122 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:06.868132 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:06.868138 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:06.870436 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:06.870465 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:06.870476 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:06.870480 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:06 GMT
	I0916 11:14:06.870485 1501462 round_trippers.go:580]     Audit-Id: 6040ed8d-27d1-40bc-97a4-025a325c2b41
	I0916 11:14:06.870489 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:06.870492 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:06.870496 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:06.872155 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:06.872597 1501462 node_ready.go:53] node "multinode-654612-m02" has status "Ready":"Unknown"
	I0916 11:14:07.368370 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:07.368394 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:07.368403 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:07.368407 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:07.370764 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:07.370790 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:07.370799 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:07.370804 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:07 GMT
	I0916 11:14:07.370807 1501462 round_trippers.go:580]     Audit-Id: 679a5462-b88e-44ee-9cff-47ea26ecc283
	I0916 11:14:07.370810 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:07.370835 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:07.370845 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:07.371058 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:07.868733 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:07.868758 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:07.868768 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:07.868773 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:07.871098 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:07.871127 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:07.871136 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:07 GMT
	I0916 11:14:07.871141 1501462 round_trippers.go:580]     Audit-Id: 0a05555d-a9d0-4978-8160-0ab0a8b7c026
	I0916 11:14:07.871146 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:07.871149 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:07.871153 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:07.871155 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:07.871343 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:08.368670 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:08.368713 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.368723 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.368727 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.371022 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.371048 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.371057 1501462 round_trippers.go:580]     Audit-Id: ef4a946b-f4c1-47ec-9a20-6e80941587c5
	I0916 11:14:08.371061 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.371065 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.371068 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.371071 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.371074 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.371409 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"799","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6858 chars]
	I0916 11:14:08.867765 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:08.867791 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.867800 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.867804 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.870421 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.870447 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.870454 1501462 round_trippers.go:580]     Audit-Id: 0c9ce6e5-0a33-4cea-8800-d2f41580b027
	I0916 11:14:08.870458 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.870461 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.870464 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.870467 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.870487 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.870672 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"835","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6463 chars]
	I0916 11:14:08.871125 1501462 node_ready.go:49] node "multinode-654612-m02" has status "Ready":"True"
	I0916 11:14:08.871146 1501462 node_ready.go:38] duration metric: took 6.503629914s for node "multinode-654612-m02" to be "Ready" ...
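The ~500ms cadence of the GETs above is a poll-until-Ready loop over the Node's conditions; "Ready":"Unknown" persists until the restarted kubelet posts fresh status. A compact approximation with client-go's wait helpers (an illustrative sketch, not minikube's node_ready implementation):

	package sketch

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// waitNodeReady polls every 500ms, up to 6m, until the node's Ready
	// condition is True, mirroring the loop in the log above.
	func waitNodeReady(ctx context.Context, c kubernetes.Interface, name string) error {
		return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				node, err := c.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil // transient API error: keep polling
				}
				for _, cond := range node.Status.Conditions {
					if cond.Type == corev1.NodeReady {
						return cond.Status == corev1.ConditionTrue, nil
					}
				}
				return false, nil
			})
	}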
	I0916 11:14:08.871157 1501462 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:14:08.871236 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:14:08.871247 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.871268 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.871280 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.875123 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:08.875148 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.875157 1501462 round_trippers.go:580]     Audit-Id: a42dbc66-c642-4816-b99c-b2e899e50df7
	I0916 11:14:08.875166 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.875178 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.875181 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.875190 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.875194 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.876310 1501462 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"835"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91618 chars]
	I0916 11:14:08.880502 1501462 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.880613 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:14:08.880624 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.880634 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.880638 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.883983 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:08.884011 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.884052 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.884060 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.884064 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.884068 1501462 round_trippers.go:580]     Audit-Id: 93cddbd2-c397-47a7-8a9c-00ca55edb369
	I0916 11:14:08.884072 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.884075 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.884208 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:14:08.884825 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:08.884844 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.884852 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.884857 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.886946 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.886970 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.886978 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.886985 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.886989 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.886994 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.886997 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.887000 1501462 round_trippers.go:580]     Audit-Id: 1f69ba49-50d2-40cf-a296-89638d03d16e
	I0916 11:14:08.887360 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:08.887794 1501462 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:08.887820 1501462 pod_ready.go:82] duration metric: took 7.279846ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.887832 1501462 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.887898 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:14:08.887915 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.887923 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.887928 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.890160 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.890220 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.890232 1501462 round_trippers.go:580]     Audit-Id: 95687000-eb93-47b0-bca3-f59019c34144
	I0916 11:14:08.890240 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.890244 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.890262 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.890267 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.890270 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.890396 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"760","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6575 chars]
	I0916 11:14:08.890923 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:08.890941 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.890949 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.890955 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.893006 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.893029 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.893037 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.893041 1501462 round_trippers.go:580]     Audit-Id: a79843e3-2505-4b2d-859e-7b52233d25ec
	I0916 11:14:08.893044 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.893048 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.893050 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.893053 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.893447 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:08.893861 1501462 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:08.893880 1501462 pod_ready.go:82] duration metric: took 6.04022ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.893903 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.893976 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:14:08.893986 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.893994 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.893998 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.896154 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.896213 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.896234 1501462 round_trippers.go:580]     Audit-Id: 085bd1c9-8588-49b4-ad00-68ff71aaca10
	I0916 11:14:08.896254 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.896273 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.896307 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.896320 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.896323 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.896480 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"753","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 9107 chars]
	I0916 11:14:08.897091 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:08.897116 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.897125 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.897131 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.899202 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.899273 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.899301 1501462 round_trippers.go:580]     Audit-Id: 32494ca9-10c6-4a5f-b23e-f2ff1b5d2a39
	I0916 11:14:08.899306 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.899309 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.899326 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.899335 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.899338 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.899453 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:08.899886 1501462 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:08.899903 1501462 pod_ready.go:82] duration metric: took 5.98789ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.899915 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:08.899980 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:14:08.899989 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.899997 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.900001 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.902350 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.902383 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.902395 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.902406 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.902409 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.902412 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.902415 1501462 round_trippers.go:580]     Audit-Id: 98eecfdc-d45e-4208-b5d3-108affbd4953
	I0916 11:14:08.902419 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.902592 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"761","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8897 chars]
	I0916 11:14:08.903218 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:08.903229 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:08.903238 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:08.903242 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:08.905531 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:08.905630 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:08.905652 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:08.905657 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:08.905661 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:08 GMT
	I0916 11:14:08.905664 1501462 round_trippers.go:580]     Audit-Id: 92ac8c53-43eb-4441-a76e-2f604c97dd51
	I0916 11:14:08.905667 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:08.905670 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:08.906378 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:08.906829 1501462 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:08.906849 1501462 pod_ready.go:82] duration metric: took 6.927846ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
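Each pod_ready:93 verdict above reduces to one condition check on the fetched Pod (followed by a second GET confirming the pod's node still exists). The core test, sketched with client-go types (illustrative, not minikube's pod_ready code):

	package sketch

	import corev1 "k8s.io/api/core/v1"

	// podReady reports whether the pod's PodReady condition is True — the
	// same "Ready":"True" the pod_ready lines print.
	func podReady(pod *corev1.Pod) bool {
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}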
	I0916 11:14:08.906863 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:09.068329 1501462 request.go:632] Waited for 161.394971ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:09.068418 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:09.068424 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:09.068433 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:09.068440 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:09.070882 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:09.070957 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:09.070981 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:09.071001 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:09.071031 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:09.071052 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:09 GMT
	I0916 11:14:09.071070 1501462 round_trippers.go:580]     Audit-Id: 18b04f17-393e-4687-b5ad-ef569fb5883a
	I0916 11:14:09.071088 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:09.071296 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
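
The request.go:632 messages ("Waited for ... due to client-side throttling, not priority and fairness") come from client-go's client-side token-bucket rate limiter, which defaults to 5 requests per second with a burst of 10; sub-200ms waits like the 161ms and 196ms seen here are consistent with the 200ms token refill at that rate. A minimal sketch of relaxing that limit on rest.Config, assuming a standard kubeconfig load (the helper name configWithHigherQPS is an invention for this example):

	// Sketch only: the throttling waits in the log are produced by
	// client-go's default rate limiter on rest.Config. Raising QPS and
	// Burst changes how long request.go waits before each call.
	package readiness

	import (
		"k8s.io/client-go/rest"
		"k8s.io/client-go/tools/clientcmd"
	)

	// configWithHigherQPS loads a kubeconfig and relaxes the client-side
	// rate limit that produces "Waited for ..." log lines.
	func configWithHigherQPS(kubeconfig string) (*rest.Config, error) {
		cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
		if err != nil {
			return nil, err
		}
		cfg.QPS = 50    // client-go default is 5 requests/second
		cfg.Burst = 100 // client-go default burst is 10
		return cfg, nil
	}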
	I0916 11:14:09.268324 1501462 request.go:632] Waited for 196.399667ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:09.268418 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:09.268431 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:09.268440 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:09.268450 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:09.270802 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:09.270824 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:09.270833 1501462 round_trippers.go:580]     Audit-Id: 3a8c85c6-a9cc-4f3b-b8d6-6c60379ed2c4
	I0916 11:14:09.270836 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:09.270840 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:09.270843 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:09.270846 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:09.270849 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:09 GMT
	I0916 11:14:09.271144 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"835","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6463 chars]
	I0916 11:14:09.468169 1501462 request.go:632] Waited for 60.226706ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:09.468241 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:09.468247 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:09.468256 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:09.468266 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:09.470672 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:09.470736 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:09.470770 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:09.470782 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:09.470785 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:09.470788 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:09 GMT
	I0916 11:14:09.470792 1501462 round_trippers.go:580]     Audit-Id: 52001e3f-3dd4-4dc7-97e8-69e8fb82b951
	I0916 11:14:09.470794 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:09.471037 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:09.667861 1501462 request.go:632] Waited for 196.262127ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:09.667972 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:09.668002 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:09.668017 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:09.668023 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:09.670418 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:09.670493 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:09.670515 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:09 GMT
	I0916 11:14:09.670537 1501462 round_trippers.go:580]     Audit-Id: 238e204f-c09e-4d51-96b1-43731abf0592
	I0916 11:14:09.670568 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:09.670590 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:09.670611 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:09.670629 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:09.670806 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:09.907856 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:09.907882 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:09.907891 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:09.907898 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:09.910290 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:09.910317 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:09.910325 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:09 GMT
	I0916 11:14:09.910329 1501462 round_trippers.go:580]     Audit-Id: 2905fa5c-5972-4532-bc1a-9632ca09dddc
	I0916 11:14:09.910332 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:09.910337 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:09.910358 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:09.910373 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:09.910529 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:10.068454 1501462 request.go:632] Waited for 157.364892ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:10.068535 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:10.068546 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:10.068559 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:10.068564 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:10.071395 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:10.071441 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:10.071462 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:10.071468 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:10.071484 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:10 GMT
	I0916 11:14:10.071491 1501462 round_trippers.go:580]     Audit-Id: 92bc0f5c-9810-4d6a-921d-70bdd54cfc42
	I0916 11:14:10.071494 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:10.071498 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:10.071670 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:10.408589 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:10.408613 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:10.408622 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:10.408629 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:10.410964 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:10.411039 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:10.411062 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:10.411078 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:10.411097 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:10.411129 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:10 GMT
	I0916 11:14:10.411146 1501462 round_trippers.go:580]     Audit-Id: a4be9daf-1e3c-4129-9e36-366b8c7640b6
	I0916 11:14:10.411165 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:10.411330 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:10.468173 1501462 request.go:632] Waited for 56.233606ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:10.468305 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:10.468318 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:10.468328 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:10.468333 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:10.470675 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:10.470752 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:10.470782 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:10.470822 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:10.470828 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:10 GMT
	I0916 11:14:10.470835 1501462 round_trippers.go:580]     Audit-Id: 75635edf-e45d-4e52-a50e-22b5fcdb5306
	I0916 11:14:10.470875 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:10.470911 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:10.471046 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:10.907171 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:10.907199 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:10.907209 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:10.907214 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:10.910003 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:10.910031 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:10.910040 1501462 round_trippers.go:580]     Audit-Id: b7a1603b-8fa4-4dcf-a225-30553349f0ef
	I0916 11:14:10.910045 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:10.910048 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:10.910051 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:10.910054 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:10.910057 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:10 GMT
	I0916 11:14:10.910420 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:10.910969 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:10.910988 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:10.910997 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:10.911002 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:10.913389 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:10.913412 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:10.913422 1501462 round_trippers.go:580]     Audit-Id: 80265d20-f187-4885-b7ea-7fd8132dd6af
	I0916 11:14:10.913427 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:10.913431 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:10.913435 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:10.913439 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:10.913442 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:10 GMT
	I0916 11:14:10.913699 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:10.914151 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:11.407930 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:11.408002 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:11.408039 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:11.408066 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:11.410757 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:11.410828 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:11.410851 1501462 round_trippers.go:580]     Audit-Id: 3db69e31-2b97-4d30-ac3e-a5095941b365
	I0916 11:14:11.410873 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:11.410906 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:11.410929 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:11.410946 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:11.410963 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:11 GMT
	I0916 11:14:11.411175 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:11.411747 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:11.411764 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:11.411773 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:11.411777 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:11.413965 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:11.413988 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:11.413997 1501462 round_trippers.go:580]     Audit-Id: 4835a32a-e909-4065-9624-891e8d99077a
	I0916 11:14:11.414002 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:11.414007 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:11.414012 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:11.414015 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:11.414018 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:11 GMT
	I0916 11:14:11.414152 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:11.907153 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:11.907178 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:11.907187 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:11.907192 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:11.909647 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:11.909686 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:11.909695 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:11.909701 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:11.909704 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:11.909708 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:11 GMT
	I0916 11:14:11.909713 1501462 round_trippers.go:580]     Audit-Id: 2e7ea9c7-1e1d-41e5-a35e-740f4b0e70cc
	I0916 11:14:11.909717 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:11.909904 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:11.910440 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:11.910460 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:11.910469 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:11.910473 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:11.912809 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:11.912883 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:11.912919 1501462 round_trippers.go:580]     Audit-Id: c5efc942-56d9-4827-87ac-4a2f36028115
	I0916 11:14:11.912956 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:11.912978 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:11.912999 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:11.913011 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:11.913014 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:11 GMT
	I0916 11:14:11.913182 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:12.407710 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:12.407734 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:12.407745 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:12.407750 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:12.411124 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:12.411199 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:12.411221 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:12.411239 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:12 GMT
	I0916 11:14:12.411272 1501462 round_trippers.go:580]     Audit-Id: cee7b5da-0c53-4ca5-a4a9-52254ef3d372
	I0916 11:14:12.411293 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:12.411311 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:12.411329 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:12.411552 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:12.412110 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:12.412128 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:12.412138 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:12.412141 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:12.414405 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:12.414428 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:12.414436 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:12 GMT
	I0916 11:14:12.414441 1501462 round_trippers.go:580]     Audit-Id: 2f6de91e-e5d7-42d5-8c2d-3e9fa762739b
	I0916 11:14:12.414446 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:12.414451 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:12.414454 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:12.414457 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:12.414648 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:12.907825 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:12.907852 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:12.907862 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:12.907867 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:12.910183 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:12.910257 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:12.910280 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:12.910298 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:12.910330 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:12.910385 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:12.910403 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:12 GMT
	I0916 11:14:12.910418 1501462 round_trippers.go:580]     Audit-Id: f7c055ad-902c-467d-b7c7-eb0ac246ac42
	I0916 11:14:12.910613 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:12.911149 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:12.911168 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:12.911177 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:12.911183 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:12.913362 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:12.913387 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:12.913395 1501462 round_trippers.go:580]     Audit-Id: 9518d7b0-651d-488a-b67c-b9a5d777b996
	I0916 11:14:12.913400 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:12.913403 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:12.913406 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:12.913409 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:12.913412 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:12 GMT
	I0916 11:14:12.913748 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:12.914192 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:13.407347 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:13.407375 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:13.407385 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:13.407392 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:13.410713 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:13.410738 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:13.410746 1501462 round_trippers.go:580]     Audit-Id: 3a91537c-e92c-496b-ad9b-02bab631d448
	I0916 11:14:13.410752 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:13.410756 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:13.410759 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:13.410762 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:13.410765 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:13 GMT
	I0916 11:14:13.411030 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"793","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6403 chars]
	I0916 11:14:13.411607 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:13.411618 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:13.411629 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:13.411634 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:13.413931 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:13.414007 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:13.414021 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:13.414026 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:13.414032 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:13.414036 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:13 GMT
	I0916 11:14:13.414041 1501462 round_trippers.go:580]     Audit-Id: 1739f5b1-51f0-43b5-a449-e83f34c90900
	I0916 11:14:13.414046 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:13.414524 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:13.907144 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:13.907171 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:13.907181 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:13.907188 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:13.909661 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:13.909724 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:13.909746 1501462 round_trippers.go:580]     Audit-Id: 6617dacb-3502-4fae-87dc-f84917138f5f
	I0916 11:14:13.909766 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:13.909796 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:13.909815 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:13.909830 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:13.909848 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:13 GMT
	I0916 11:14:13.909978 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"846","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6651 chars]
	I0916 11:14:13.910545 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:13.910561 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:13.910569 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:13.910575 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:13.912786 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:13.912807 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:13.912815 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:13.912818 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:13.912821 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:13.912826 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:13 GMT
	I0916 11:14:13.912829 1501462 round_trippers.go:580]     Audit-Id: 82069b3b-93da-4507-aeae-94129b168440
	I0916 11:14:13.912832 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:13.913051 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:14.407741 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:14.407772 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:14.407781 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:14.407785 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:14.410811 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:14.410838 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:14.410847 1501462 round_trippers.go:580]     Audit-Id: cc895061-71d8-4425-8c4a-263db46bd91c
	I0916 11:14:14.410858 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:14.410862 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:14.410866 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:14.410869 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:14.410871 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:14 GMT
	I0916 11:14:14.411508 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"846","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6651 chars]
	I0916 11:14:14.412090 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:14.412110 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:14.412118 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:14.412125 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:14.414358 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:14.414382 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:14.414391 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:14.414395 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:14.414398 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:14.414401 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:14 GMT
	I0916 11:14:14.414405 1501462 round_trippers.go:580]     Audit-Id: 798c4974-60d9-4c55-a3eb-9ff803756316
	I0916 11:14:14.414408 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:14.414713 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:14.907964 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:14.907993 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:14.908003 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:14.908007 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:14.910594 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:14.910620 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:14.910630 1501462 round_trippers.go:580]     Audit-Id: 9ec78aaf-caa7-42df-9cc9-49564ac5c3a5
	I0916 11:14:14.910634 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:14.910637 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:14.910640 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:14.910643 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:14.910646 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:14 GMT
	I0916 11:14:14.910864 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"846","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6651 chars]
	I0916 11:14:14.911436 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:14.911457 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:14.911466 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:14.911473 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:14.913612 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:14.913637 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:14.913646 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:14.913653 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:14.913656 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:14.913660 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:14 GMT
	I0916 11:14:14.913662 1501462 round_trippers.go:580]     Audit-Id: 181b81c6-e2b2-4742-ae10-9a8ada8a7456
	I0916 11:14:14.913665 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:14.914119 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:14.914590 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
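The cycle above is minikube's pod-readiness wait: roughly every 500ms it GETs the pod, then the node hosting it, and pod_ready.go reports that kube-proxy-gf2tw is still not Ready (the ~.407/.907 timestamps on successive GETs show the half-second cadence). A minimal client-go sketch of the same polling pattern, assuming a hypothetical helper name waitPodReady and the default kubeconfig path; minikube's real loop (which also re-fetches the node each cycle, as the GET .../nodes/multinode-654612-m02 lines show) differs in detail:

    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // isPodReady reports whether the PodReady condition is True, the kind of
    // check behind the pod_ready.go "Ready":"False" lines above.
    func isPodReady(pod *corev1.Pod) bool {
        for _, c := range pod.Status.Conditions {
            if c.Type == corev1.PodReady {
                return c.Status == corev1.ConditionTrue
            }
        }
        return false
    }

    // waitPodReady (hypothetical helper) polls the API server at a fixed
    // interval until the pod reports Ready or the timeout elapses.
    func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, interval, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for {
            pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
            if err != nil {
                return err
            }
            if isPodReady(pod) {
                return nil
            }
            fmt.Printf("pod %q in %q namespace has status \"Ready\":\"False\"\n", name, ns)
            if time.Now().After(deadline) {
                return fmt.Errorf("timed out waiting for %s/%s", ns, name)
            }
            time.Sleep(interval)
        }
    }

    func main() {
        // Assumed kubeconfig location; minikube constructs its client differently.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        if err := waitPodReady(context.Background(), cs, "kube-system", "kube-proxy-gf2tw", 500*time.Millisecond, 2*time.Minute); err != nil {
            panic(err)
        }
    }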
	I0916 11:14:15.407365 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:15.407497 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:15.407516 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:15.407524 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:15.410529 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:15.410603 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:15.410625 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:15.410642 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:15.410674 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:15.410696 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:15.410714 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:15 GMT
	I0916 11:14:15.410734 1501462 round_trippers.go:580]     Audit-Id: 82611206-b2b7-4b00-8046-ec91757bc8c6
	I0916 11:14:15.410868 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:15.411488 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:15.411507 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:15.411516 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:15.411521 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:15.414222 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:15.414253 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:15.414263 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:15 GMT
	I0916 11:14:15.414267 1501462 round_trippers.go:580]     Audit-Id: 307e82ba-7eb5-4528-a9fc-c9f8c3480f36
	I0916 11:14:15.414276 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:15.414279 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:15.414282 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:15.414285 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:15.414555 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:15.907134 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:15.907159 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:15.907169 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:15.907173 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:15.909766 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:15.909795 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:15.909804 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:15.909810 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:15.909815 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:15.909818 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:15 GMT
	I0916 11:14:15.909821 1501462 round_trippers.go:580]     Audit-Id: fdc6f058-e36f-4fdd-9c69-a692ac092596
	I0916 11:14:15.909824 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:15.910117 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:15.910678 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:15.910697 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:15.910707 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:15.910714 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:15.912887 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:15.912913 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:15.912922 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:15.912927 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:15.912931 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:15.912933 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:15 GMT
	I0916 11:14:15.912936 1501462 round_trippers.go:580]     Audit-Id: 1d7faaed-5729-45d5-86c8-0545fb8309a5
	I0916 11:14:15.912939 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:15.913278 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:16.408085 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:16.408149 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:16.408179 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:16.408186 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:16.411112 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:16.411194 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:16.411266 1501462 round_trippers.go:580]     Audit-Id: dc5efd15-573e-489a-adda-7df81b93fbe7
	I0916 11:14:16.411300 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:16.411316 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:16.411349 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:16.411378 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:16.411387 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:16 GMT
	I0916 11:14:16.411618 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:16.412370 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:16.412394 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:16.412409 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:16.412419 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:16.414768 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:16.414794 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:16.414805 1501462 round_trippers.go:580]     Audit-Id: 2d3d839f-6528-469e-8ec7-fa88d06bf91a
	I0916 11:14:16.414809 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:16.414812 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:16.414814 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:16.414817 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:16.414820 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:16 GMT
	I0916 11:14:16.414974 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:16.907370 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:16.907396 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:16.907406 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:16.907413 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:16.910036 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:16.910097 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:16.910121 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:16.910138 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:16.910156 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:16 GMT
	I0916 11:14:16.910222 1501462 round_trippers.go:580]     Audit-Id: 4b01398b-847b-4751-a9ae-2dbe3f7d7c54
	I0916 11:14:16.910233 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:16.910237 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:16.910368 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:16.910922 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:16.910939 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:16.910948 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:16.910952 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:16.913222 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:16.913240 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:16.913248 1501462 round_trippers.go:580]     Audit-Id: 4ad90472-af39-411f-a991-8867ccb628b7
	I0916 11:14:16.913251 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:16.913254 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:16.913257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:16.913261 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:16.913263 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:16 GMT
	I0916 11:14:16.913438 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:17.407035 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:17.407060 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:17.407070 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:17.407076 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:17.410051 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:17.410073 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:17.410082 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:17 GMT
	I0916 11:14:17.410086 1501462 round_trippers.go:580]     Audit-Id: 1e461303-36d1-4e9b-869b-595f4e3b7b61
	I0916 11:14:17.410089 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:17.410092 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:17.410095 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:17.410098 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:17.411225 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:17.411828 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:17.411846 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:17.411856 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:17.411862 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:17.414138 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:17.414157 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:17.414165 1501462 round_trippers.go:580]     Audit-Id: 82b2cfb0-e8c0-4dec-92a0-dcc174497887
	I0916 11:14:17.414169 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:17.414173 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:17.414176 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:17.414179 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:17.414181 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:17 GMT
	I0916 11:14:17.414821 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:17.415232 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
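The round_trippers.go lines themselves are client-go's HTTP debug output (request URL and headers at 463/469/473, response status and headers at 574/577/580, bodies at request.go:1351, truncated past a size limit). They appear only at high klog verbosity; as a point of reference, kubectl documents -v=8 as showing HTTP request/response contents and -v=9 as showing them without truncation. A sketch of enabling that verbosity in a Go program that uses client-go, assuming klog's standard flag registration:

    package main

    import (
        "flag"

        "k8s.io/klog/v2"
    )

    func main() {
        // Register klog's flags (-v, -logtostderr, ...) and raise verbosity so
        // client-go's round_trippers.go / request.go debug logging is emitted.
        klog.InitFlags(nil)
        _ = flag.Set("v", "8") // -v=9 would also log response bodies untruncated
        flag.Parse()

        klog.V(8).Info("HTTP debug logging enabled") // printed only when -v >= 8
        // ... build a client-go clientset here; its traffic is now logged.
    }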
	I0916 11:14:17.907438 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:17.907466 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:17.907477 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:17.907483 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:17.909953 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:17.909974 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:17.909982 1501462 round_trippers.go:580]     Audit-Id: ac15ab70-d58e-4e5c-a116-7e8d992390a1
	I0916 11:14:17.909991 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:17.909994 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:17.909998 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:17.910003 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:17.910006 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:17 GMT
	I0916 11:14:17.910194 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:17.910743 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:17.910762 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:17.910771 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:17.910775 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:17.913144 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:17.913167 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:17.913175 1501462 round_trippers.go:580]     Audit-Id: 9c819a14-8a1b-4727-b9d7-95562dceab6d
	I0916 11:14:17.913179 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:17.913183 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:17.913187 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:17.913190 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:17.913192 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:17 GMT
	I0916 11:14:17.913461 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:18.407516 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:18.407548 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:18.407560 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:18.407563 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:18.410521 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:18.410586 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:18.410609 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:18 GMT
	I0916 11:14:18.410628 1501462 round_trippers.go:580]     Audit-Id: f76a7c2e-00ce-4f43-a383-478ed93ae406
	I0916 11:14:18.410646 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:18.410678 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:18.410695 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:18.410714 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:18.410872 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:18.411443 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:18.411459 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:18.411468 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:18.411473 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:18.413739 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:18.413764 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:18.413773 1501462 round_trippers.go:580]     Audit-Id: 3cc9a5ad-ca62-4375-8d61-8ad9da1f7e1e
	I0916 11:14:18.413778 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:18.413781 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:18.413785 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:18.413788 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:18.413793 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:18 GMT
	I0916 11:14:18.414017 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:18.907116 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:18.907146 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:18.907156 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:18.907164 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:18.911486 1501462 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:14:18.911513 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:18.911522 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:18 GMT
	I0916 11:14:18.911527 1501462 round_trippers.go:580]     Audit-Id: e0d93a54-7b02-4f7d-bdf3-990dc2b1a6f1
	I0916 11:14:18.911531 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:18.911533 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:18.911536 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:18.911539 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:18.911831 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:18.912383 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:18.912402 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:18.912411 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:18.912415 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:18.914613 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:18.914637 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:18.914647 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:18.914653 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:18 GMT
	I0916 11:14:18.914658 1501462 round_trippers.go:580]     Audit-Id: 7536ce15-4b1c-4fc7-bb0d-c2d959625442
	I0916 11:14:18.914661 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:18.914666 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:18.914669 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:18.914877 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:19.408106 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:19.408178 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:19.408203 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:19.408225 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:19.410791 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:19.410823 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:19.410831 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:19.410834 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:19.410837 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:19 GMT
	I0916 11:14:19.410840 1501462 round_trippers.go:580]     Audit-Id: 5f65d2f3-8591-4659-9b07-b67c8b8bfdd8
	I0916 11:14:19.410859 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:19.410870 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:19.411965 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:19.412557 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:19.412577 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:19.412586 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:19.412591 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:19.415183 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:19.415209 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:19.415217 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:19.415220 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:19.415223 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:19.415226 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:19.415229 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:19 GMT
	I0916 11:14:19.415232 1501462 round_trippers.go:580]     Audit-Id: 71cdf9ad-8a88-4075-b939-c61e212dbb47
	I0916 11:14:19.415375 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:19.415785 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
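The X-Kubernetes-Pf-Flowschema-Uid and X-Kubernetes-Pf-Prioritylevel-Uid response headers come from API Priority and Fairness: the API server stamps each response with the UID of the FlowSchema that matched the request and the PriorityLevelConfiguration it was handled under. To map the UIDs above back to names, one could list the objects; a sketch assuming a recent client-go (FlowcontrolV1) and the default kubeconfig path:

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile) // assumed kubeconfig
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Print each FlowSchema's UID and name; the one matching
        // X-Kubernetes-Pf-Flowschema-Uid classified the requests above.
        fss, err := cs.FlowcontrolV1().FlowSchemas().List(context.Background(), metav1.ListOptions{})
        if err != nil {
            panic(err)
        }
        for _, fs := range fss.Items {
            fmt.Printf("%s  %s\n", fs.UID, fs.Name)
        }
    }

kubectl get flowschemas and kubectl get prioritylevelconfigurations give the same UID-to-name mapping from the command line.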
	I0916 11:14:19.907495 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:19.907522 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:19.907532 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:19.907536 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:19.909997 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:19.910038 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:19.910053 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:19.910059 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:19.910065 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:19.910069 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:19.910074 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:19 GMT
	I0916 11:14:19.910084 1501462 round_trippers.go:580]     Audit-Id: ec51b60b-354a-45c4-851e-f42ef311db2f
	I0916 11:14:19.910270 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:19.910819 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:19.910838 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:19.910847 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:19.910851 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:19.912920 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:19.912945 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:19.912955 1501462 round_trippers.go:580]     Audit-Id: 37ff7f00-93c4-4c18-af05-bb1f127833d4
	I0916 11:14:19.912960 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:19.912986 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:19.913015 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:19.913025 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:19.913029 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:19 GMT
	I0916 11:14:19.913442 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:20.407644 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:20.407720 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:20.407746 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:20.407765 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:20.410714 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:20.410748 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:20.410757 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:20.410761 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:20.410764 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:20 GMT
	I0916 11:14:20.410782 1501462 round_trippers.go:580]     Audit-Id: 8e61a5f3-6142-4b9e-8cf6-35d2c6790975
	I0916 11:14:20.410793 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:20.410797 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:20.410990 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:20.411565 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:20.411584 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:20.411593 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:20.411598 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:20.413586 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:20.413612 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:20.413620 1501462 round_trippers.go:580]     Audit-Id: 5c41222c-b6d5-4383-a1b4-c1ed8c5401ea
	I0916 11:14:20.413625 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:20.413629 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:20.413632 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:20.413637 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:20.413641 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:20 GMT
	I0916 11:14:20.413816 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:20.907994 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:20.908021 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:20.908031 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:20.908035 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:20.910724 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:20.910761 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:20.910771 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:20.910777 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:20.910781 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:20 GMT
	I0916 11:14:20.910784 1501462 round_trippers.go:580]     Audit-Id: c44b6f7d-31b4-410d-b3dd-d05391933abd
	I0916 11:14:20.910787 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:20.910801 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:20.911149 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:20.911737 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:20.911754 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:20.911763 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:20.911768 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:20.914019 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:20.914043 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:20.914052 1501462 round_trippers.go:580]     Audit-Id: 35cb3ecc-2ebc-4255-9f80-ce5b08152380
	I0916 11:14:20.914056 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:20.914059 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:20.914062 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:20.914064 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:20.914067 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:20 GMT
	I0916 11:14:20.914345 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:21.407741 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:21.407761 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:21.407771 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:21.407775 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:21.410211 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:21.410318 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:21.410341 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:21.410362 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:21.410393 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:21.410414 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:21.410433 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:21 GMT
	I0916 11:14:21.410450 1501462 round_trippers.go:580]     Audit-Id: dbc23ec6-fc33-44fa-ba45-a7f22f22bdee
	I0916 11:14:21.410593 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:21.411217 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:21.411234 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:21.411243 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:21.411247 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:21.413319 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:21.413342 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:21.413350 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:21.413354 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:21 GMT
	I0916 11:14:21.413357 1501462 round_trippers.go:580]     Audit-Id: 85fa524d-5bdb-4e8f-aca1-11f5add5d211
	I0916 11:14:21.413360 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:21.413363 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:21.413366 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:21.413703 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:21.907875 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:21.907901 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:21.907912 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:21.907917 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:21.910226 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:21.910250 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:21.910259 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:21.910263 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:21.910266 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:21 GMT
	I0916 11:14:21.910269 1501462 round_trippers.go:580]     Audit-Id: da789a06-2517-49e2-aa62-43752ff06171
	I0916 11:14:21.910272 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:21.910274 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:21.910621 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:21.911168 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:21.911188 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:21.911197 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:21.911201 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:21.913255 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:21.913278 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:21.913287 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:21 GMT
	I0916 11:14:21.913291 1501462 round_trippers.go:580]     Audit-Id: 2bf8fbc6-2629-45ca-b9b5-f81ff4b6feb0
	I0916 11:14:21.913294 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:21.913297 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:21.913300 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:21.913303 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:21.913551 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:21.913998 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
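
The pod_ready.go:103 line above summarizes each poll cycle: the pod's PodReady condition is still False. A minimal sketch, assuming client-go, of how such a readiness result can be derived from a single GET; the kubeconfig handling and helper name are illustrative, not minikube's actual code:

    package main

    import (
    	"context"
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // isPodReady reports whether the PodReady condition is True, which is
    // the check being reported as "Ready":"False" in the log above.
    func isPodReady(pod *corev1.Pod) bool {
    	for _, cond := range pod.Status.Conditions {
    		if cond.Type == corev1.PodReady {
    			return cond.Status == corev1.ConditionTrue
    		}
    	}
    	return false
    }

    func main() {
    	// Load the default kubeconfig (~/.kube/config).
    	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	clientset, err := kubernetes.NewForConfig(config)
    	if err != nil {
    		panic(err)
    	}
    	pod, err := clientset.CoreV1().Pods("kube-system").
    		Get(context.TODO(), "kube-proxy-gf2tw", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("pod %q ready: %v\n", pod.Name, isPodReady(pod))
    }
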
	I0916 11:14:22.407204 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:22.407232 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:22.407242 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:22.407249 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:22.409707 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:22.409782 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:22.409804 1501462 round_trippers.go:580]     Audit-Id: 72ea0f2d-e8da-434c-817b-deb80217c095
	I0916 11:14:22.409824 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:22.409859 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:22.409882 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:22.409899 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:22.409917 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:22 GMT
	I0916 11:14:22.410319 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:22.410943 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:22.410961 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:22.410970 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:22.410975 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:22.413619 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:22.413645 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:22.413654 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:22.413658 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:22.413662 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:22.413666 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:22.413669 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:22 GMT
	I0916 11:14:22.413672 1501462 round_trippers.go:580]     Audit-Id: 9701b3ac-4325-4a8e-afda-4c5842aab804
	I0916 11:14:22.413818 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:22.907612 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:22.907688 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:22.907711 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:22.907732 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:22.911360 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:22.911438 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:22.911460 1501462 round_trippers.go:580]     Audit-Id: 9b89d642-298b-47d6-9363-7d45789694bc
	I0916 11:14:22.911479 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:22.911514 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:22.911537 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:22.911556 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:22.911576 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:22 GMT
	I0916 11:14:22.911870 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:22.912482 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:22.912501 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:22.912510 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:22.912514 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:22.914545 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:22.914569 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:22.914578 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:22 GMT
	I0916 11:14:22.914581 1501462 round_trippers.go:580]     Audit-Id: 4b36d913-ce5f-4752-8b83-7311d9a22e67
	I0916 11:14:22.914584 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:22.914589 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:22.914592 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:22.914595 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:22.914768 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:23.407020 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:23.407049 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:23.407059 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:23.407062 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:23.409463 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:23.409531 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:23.409554 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:23.409575 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:23.409607 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:23.409629 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:23 GMT
	I0916 11:14:23.409647 1501462 round_trippers.go:580]     Audit-Id: d3de4fc8-b46f-47f2-9e95-5a984157c1f1
	I0916 11:14:23.409665 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:23.409827 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:23.410406 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:23.410445 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:23.410466 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:23.410485 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:23.412713 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:23.412738 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:23.412746 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:23.412752 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:23.412756 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:23.412767 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:23.412773 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:23 GMT
	I0916 11:14:23.412776 1501462 round_trippers.go:580]     Audit-Id: c538ec9e-71f0-4eb4-b54f-aa4db05ad313
	I0916 11:14:23.412880 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:23.907126 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:23.907148 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:23.907158 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:23.907164 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:23.909710 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:23.909736 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:23.909743 1501462 round_trippers.go:580]     Audit-Id: 45bb08f0-4dda-4800-8ada-97e7f214cf9b
	I0916 11:14:23.909748 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:23.909759 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:23.909764 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:23.909768 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:23.909771 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:23 GMT
	I0916 11:14:23.909901 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:23.910475 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:23.910492 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:23.910501 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:23.910506 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:23.912763 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:23.912789 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:23.912797 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:23.912801 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:23 GMT
	I0916 11:14:23.912804 1501462 round_trippers.go:580]     Audit-Id: e84ff48f-08e4-4281-acce-76d612ce2a11
	I0916 11:14:23.912808 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:23.912811 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:23.912816 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:23.913112 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:24.407170 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:24.407246 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:24.407269 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:24.407289 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:24.410629 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:24.410708 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:24.410731 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:24.410761 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:24 GMT
	I0916 11:14:24.410766 1501462 round_trippers.go:580]     Audit-Id: 2dfc98d6-9f96-4961-92a4-c059b9b8e63c
	I0916 11:14:24.410770 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:24.410774 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:24.410776 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:24.410939 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:24.411550 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:24.411570 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:24.411579 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:24.411583 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:24.413727 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:24.413779 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:24.413808 1501462 round_trippers.go:580]     Audit-Id: 9b88348d-bea4-433d-b0a1-ba829a11e844
	I0916 11:14:24.413827 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:24.413870 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:24.413904 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:24.413924 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:24.413953 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:24 GMT
	I0916 11:14:24.414469 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:24.414888 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:24.907237 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:24.907266 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:24.907276 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:24.907281 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:24.909733 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:24.909766 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:24.909781 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:24 GMT
	I0916 11:14:24.909787 1501462 round_trippers.go:580]     Audit-Id: ec41381f-f186-47df-a532-4e325c55dce1
	I0916 11:14:24.909792 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:24.909796 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:24.909800 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:24.909805 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:24.910200 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:24.910758 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:24.910778 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:24.910788 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:24.910793 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:24.913283 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:24.913382 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:24.913433 1501462 round_trippers.go:580]     Audit-Id: 45b673df-cc3d-4813-8466-3f7aade9db04
	I0916 11:14:24.913468 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:24.913504 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:24.913535 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:24.913566 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:24.913649 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:24 GMT
	I0916 11:14:24.913783 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:25.407165 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:25.407192 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:25.407202 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:25.407207 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:25.410951 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:25.410980 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:25.410988 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:25.410993 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:25.410997 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:25.411000 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:25 GMT
	I0916 11:14:25.411004 1501462 round_trippers.go:580]     Audit-Id: 25635a8e-4da4-476d-8338-fef1cee5874b
	I0916 11:14:25.411008 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:25.411131 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:25.411685 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:25.411702 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:25.411710 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:25.411713 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:25.413720 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:25.413740 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:25.413748 1501462 round_trippers.go:580]     Audit-Id: 2765cf1a-49ba-4c54-99c1-a505780ad799
	I0916 11:14:25.413753 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:25.413756 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:25.413759 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:25.413762 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:25.413764 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:25 GMT
	I0916 11:14:25.413895 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:25.907087 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:25.907109 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:25.907118 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:25.907122 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:25.909834 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:25.909859 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:25.909868 1501462 round_trippers.go:580]     Audit-Id: a8b51125-deab-4d22-b9d1-69cd61d19abb
	I0916 11:14:25.909873 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:25.909877 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:25.909880 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:25.909882 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:25.909885 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:25 GMT
	I0916 11:14:25.910306 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:25.910851 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:25.910862 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:25.910869 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:25.910874 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:25.913261 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:25.913278 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:25.913285 1501462 round_trippers.go:580]     Audit-Id: 845f8646-c8a0-4e3d-9e03-92cea0ae90ec
	I0916 11:14:25.913289 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:25.913293 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:25.913299 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:25.913302 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:25.913305 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:25 GMT
	I0916 11:14:25.913520 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:26.407097 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:26.407125 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:26.407135 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:26.407143 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:26.410330 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:26.410352 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:26.410360 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:26.410364 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:26.410367 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:26.410369 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:26.410372 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:26 GMT
	I0916 11:14:26.410375 1501462 round_trippers.go:580]     Audit-Id: fbaa8f88-4ff4-4b73-af09-f2e111a31a93
	I0916 11:14:26.410514 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:26.411063 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:26.411073 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:26.411082 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:26.411087 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:26.413651 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:26.413670 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:26.413679 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:26.413683 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:26.413685 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:26.413688 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:26 GMT
	I0916 11:14:26.413691 1501462 round_trippers.go:580]     Audit-Id: 200eb1ff-e8a7-4cc9-a1f7-ed9deed004f4
	I0916 11:14:26.413693 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:26.413804 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:26.907120 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:26.907150 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:26.907160 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:26.907166 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:26.909542 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:26.909571 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:26.909580 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:26.909585 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:26.909589 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:26 GMT
	I0916 11:14:26.909595 1501462 round_trippers.go:580]     Audit-Id: f8ad0225-a7e0-47d4-a756-cc174afe5e7c
	I0916 11:14:26.909598 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:26.909602 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:26.910005 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"854","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6743 chars]
	I0916 11:14:26.910656 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:26.910680 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:26.910690 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:26.910694 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:26.913124 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:26.913146 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:26.913155 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:26.913158 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:26.913161 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:26.913164 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:26.913168 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:26 GMT
	I0916 11:14:26.913172 1501462 round_trippers.go:580]     Audit-Id: 0dce6af9-8994-48fa-8588-f4f133398400
	I0916 11:14:26.913394 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:26.913888 1501462 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:27.407012 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:27.407041 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.407051 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.407056 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.410282 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:27.410348 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.410372 1501462 round_trippers.go:580]     Audit-Id: eacfdd13-d70e-415e-8968-a18dcaa8c10f
	I0916 11:14:27.410397 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.410416 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.410464 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.410497 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.410518 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.410644 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"877","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:14:27.411177 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:27.411197 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.411206 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.411210 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.413302 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:27.413322 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.413330 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.413335 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.413340 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.413344 1501462 round_trippers.go:580]     Audit-Id: c28347bf-d644-4958-bda4-067d60d8fee2
	I0916 11:14:27.413347 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.413349 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.413522 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:27.413935 1501462 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:27.413956 1501462 pod_ready.go:82] duration metric: took 18.507085721s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.413968 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.414034 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:14:27.414044 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.414052 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.414057 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.416036 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:27.416054 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.416061 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.416066 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.416070 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.416073 1501462 round_trippers.go:580]     Audit-Id: f827d78f-b54b-4bb4-ba91-0665ac1ec87c
	I0916 11:14:27.416075 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.416078 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.416218 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"681","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:14:27.416737 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:27.416748 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.416756 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.416760 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.418657 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:27.418717 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.418740 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.418766 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.418785 1501462 round_trippers.go:580]     Audit-Id: a2397233-2efe-4502-bc79-ac13abd75a39
	I0916 11:14:27.418827 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.418844 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.418859 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.418977 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:27.419392 1501462 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:27.419414 1501462 pod_ready.go:82] duration metric: took 5.433417ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.419426 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.419492 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:27.419503 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.419511 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.419516 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.421516 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:27.421533 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.421540 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.421544 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.421547 1501462 round_trippers.go:580]     Audit-Id: 56d1b4b1-bce0-4e21-89c5-6bff987a5350
	I0916 11:14:27.421550 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.421553 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.421555 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.421878 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:27.422388 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:27.422406 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.422414 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.422419 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.424463 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:27.424511 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.424520 1501462 round_trippers.go:580]     Audit-Id: d6923c96-7513-477a-ad6d-d364139a221d
	I0916 11:14:27.424525 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.424528 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.424532 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.424536 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.424539 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.424646 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:27.425115 1501462 pod_ready.go:98] node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:14:27.425138 1501462 pod_ready.go:82] duration metric: took 5.704426ms for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	E0916 11:14:27.425150 1501462 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:14:27.425159 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.425222 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:14:27.425233 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.425241 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.425247 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.427223 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:27.427244 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.427252 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.427257 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.427262 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.427265 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.427268 1501462 round_trippers.go:580]     Audit-Id: 3e6c97f1-3f00-4eae-87ee-67d398b58dd8
	I0916 11:14:27.427272 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.427366 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"755","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5101 chars]
	I0916 11:14:27.427779 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:27.427797 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.427842 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.427852 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.429922 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:27.429979 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.429996 1501462 round_trippers.go:580]     Audit-Id: f2e8b0fc-5f15-4d56-b3a1-84db9a90023a
	I0916 11:14:27.430002 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.430007 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.430010 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.430015 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.430019 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.430171 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:27.430578 1501462 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:27.430597 1501462 pod_ready.go:82] duration metric: took 5.425935ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:27.430609 1501462 pod_ready.go:39] duration metric: took 18.559441678s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
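
The ~500 ms GET cadence above (fetch the pod, fetch its node, repeat until both report Ready) is plain condition polling against the API server. Below is a minimal standalone sketch of the same pattern, stdlib only; the URL is copied from the log, while TLS verification and the bearer token a real request needs are stubbed out, so treat this as an illustration rather than minikube's actual pod_ready implementation.

    package main

    import (
        "crypto/tls"
        "encoding/json"
        "fmt"
        "net/http"
        "time"
    )

    // podStatus keeps only the fields we need from the Pod JSON body.
    type podStatus struct {
        Status struct {
            Conditions []struct {
                Type   string `json:"type"`
                Status string `json:"status"`
            } `json:"conditions"`
        } `json:"status"`
    }

    // waitPodReady polls until the pod's Ready condition is True or the
    // timeout expires, mirroring the ~500ms GET cadence in the log above.
    func waitPodReady(url string, timeout time.Duration) error {
        client := &http.Client{Transport: &http.Transport{
            // Demo only: a real caller verifies the cluster CA and also
            // sends an "Authorization: Bearer <token>" header.
            TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
        }}
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            resp, err := client.Get(url)
            if err == nil {
                var p podStatus
                if json.NewDecoder(resp.Body).Decode(&p) == nil {
                    for _, c := range p.Status.Conditions {
                        if c.Type == "Ready" && c.Status == "True" {
                            resp.Body.Close()
                            return nil
                        }
                    }
                }
                resp.Body.Close()
            }
            time.Sleep(500 * time.Millisecond)
        }
        return fmt.Errorf("timed out waiting for pod at %s", url)
    }

    func main() {
        url := "https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw"
        if err := waitPodReady(url, 6*time.Minute); err != nil {
            fmt.Println(err)
        }
    }
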
	I0916 11:14:27.430628 1501462 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:14:27.430687 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:14:27.443076 1501462 system_svc.go:56] duration metric: took 12.439317ms WaitForService to wait for kubelet
	I0916 11:14:27.443108 1501462 kubeadm.go:582] duration metric: took 25.200325084s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:14:27.443128 1501462 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:14:27.443209 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:14:27.443219 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:27.443227 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:27.443232 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:27.445754 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:27.445781 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:27.445790 1501462 round_trippers.go:580]     Audit-Id: 85c9c32a-56cf-4199-b3d5-283d0629f9fe
	I0916 11:14:27.445794 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:27.445798 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:27.445802 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:27.445806 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:27.445815 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:27 GMT
	I0916 11:14:27.446207 1501462 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"879"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields
":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 21297 chars]
	I0916 11:14:27.447209 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:27.447232 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:27.447242 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:27.447247 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:27.447252 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:27.447255 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:27.447261 1501462 node_conditions.go:105] duration metric: took 4.127209ms to run NodePressure ...
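
The three capacity pairs above come from the single NodeList response at resourceVersion 879; each item's status.capacity map carries the cpu and ephemeral-storage values being printed. A sketch of reading them with the same raw-HTTP approach, again with auth and TLS verification stubbed out for brevity:

    package main

    import (
        "crypto/tls"
        "encoding/json"
        "fmt"
        "net/http"
    )

    // nodeList keeps only name and capacity from the NodeList JSON.
    type nodeList struct {
        Items []struct {
            Metadata struct {
                Name string `json:"name"`
            } `json:"metadata"`
            Status struct {
                Capacity map[string]string `json:"capacity"`
            } `json:"status"`
        } `json:"items"`
    }

    func main() {
        client := &http.Client{Transport: &http.Transport{
            TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // demo only
        }}
        // A real call also needs a bearer token; omitted here.
        resp, err := client.Get("https://192.168.67.2:8443/api/v1/nodes")
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        var nl nodeList
        if err := json.NewDecoder(resp.Body).Decode(&nl); err != nil {
            panic(err)
        }
        for _, n := range nl.Items {
            fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n",
                n.Metadata.Name, n.Status.Capacity["cpu"],
                n.Status.Capacity["ephemeral-storage"])
        }
    }
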
	I0916 11:14:27.447274 1501462 start.go:241] waiting for startup goroutines ...
	I0916 11:14:27.447304 1501462 start.go:255] writing updated cluster config ...
	I0916 11:14:27.450831 1501462 out.go:201] 
	I0916 11:14:27.453766 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:27.453888 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:14:27.457124 1501462 out.go:177] * Starting "multinode-654612-m03" worker node in "multinode-654612" cluster
	I0916 11:14:27.459662 1501462 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:14:27.462405 1501462 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:14:27.465050 1501462 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:14:27.465084 1501462 cache.go:56] Caching tarball of preloaded images
	I0916 11:14:27.465149 1501462 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:14:27.465205 1501462 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:14:27.465221 1501462 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:14:27.465362 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:14:27.485098 1501462 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:14:27.485119 1501462 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:14:27.485213 1501462 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:14:27.485231 1501462 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:14:27.485236 1501462 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:14:27.485244 1501462 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:14:27.485249 1501462 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:14:27.486796 1501462 image.go:273] response: 
	I0916 11:14:27.607611 1501462 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:14:27.607648 1501462 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:14:27.607679 1501462 start.go:360] acquireMachinesLock for multinode-654612-m03: {Name:mk96649fad40cc5082fee1b6d801ef9f364521f6 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:14:27.607748 1501462 start.go:364] duration metric: took 44.504µs to acquireMachinesLock for "multinode-654612-m03"
	I0916 11:14:27.607774 1501462 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:14:27.607784 1501462 fix.go:54] fixHost starting: m03
	I0916 11:14:27.608091 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612-m03 --format={{.State.Status}}
	I0916 11:14:27.624446 1501462 fix.go:112] recreateIfNeeded on multinode-654612-m03: state=Stopped err=<nil>
	W0916 11:14:27.624473 1501462 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:14:27.627320 1501462 out.go:177] * Restarting existing docker container for "multinode-654612-m03" ...
	I0916 11:14:27.629970 1501462 cli_runner.go:164] Run: docker start multinode-654612-m03
	I0916 11:14:27.928504 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612-m03 --format={{.State.Status}}
	I0916 11:14:27.956494 1501462 kic.go:430] container "multinode-654612-m03" state is running.
	I0916 11:14:27.956923 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m03
	I0916 11:14:27.977968 1501462 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:14:27.978226 1501462 machine.go:93] provisionDockerMachine start ...
	I0916 11:14:27.978302 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:27.998868 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:14:27.999111 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34768 <nil> <nil>}
	I0916 11:14:27.999127 1501462 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:14:27.999645 1501462 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:51758->127.0.0.1:34768: read: connection reset by peer
	I0916 11:14:31.140331 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m03
	
	I0916 11:14:31.140364 1501462 ubuntu.go:169] provisioning hostname "multinode-654612-m03"
	I0916 11:14:31.140440 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:31.160581 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:14:31.160840 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34768 <nil> <nil>}
	I0916 11:14:31.160857 1501462 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612-m03 && echo "multinode-654612-m03" | sudo tee /etc/hostname
	I0916 11:14:31.313019 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m03
	
	I0916 11:14:31.313104 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:31.331155 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:14:31.331402 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34768 <nil> <nil>}
	I0916 11:14:31.331427 1501462 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:14:31.468774 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
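
provisionDockerMachine is essentially sequential SSH: dial the forwarded port (34768 here), open a session per command, run it with sudo, read the output. A hedged sketch of one such step using golang.org/x/crypto/ssh; the key path is a placeholder for the machine key shown later in the log, and host-key checking is disabled only because this is a demo:

    package main

    import (
        "fmt"
        "os"

        "golang.org/x/crypto/ssh"
    )

    func main() {
        key, err := os.ReadFile("/path/to/machines/multinode-654612-m03/id_rsa") // placeholder path
        if err != nil {
            panic(err)
        }
        signer, err := ssh.ParsePrivateKey(key)
        if err != nil {
            panic(err)
        }
        cfg := &ssh.ClientConfig{
            User:            "docker",
            Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
            HostKeyCallback: ssh.InsecureIgnoreHostKey(), // demo only
        }
        client, err := ssh.Dial("tcp", "127.0.0.1:34768", cfg)
        if err != nil {
            panic(err)
        }
        defer client.Close()
        sess, err := client.NewSession()
        if err != nil {
            panic(err)
        }
        defer sess.Close()
        // Same hostname step the log runs above.
        out, err := sess.CombinedOutput(`sudo hostname multinode-654612-m03 && echo "multinode-654612-m03" | sudo tee /etc/hostname`)
        fmt.Printf("%s err=%v\n", out, err)
    }
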
	I0916 11:14:31.468805 1501462 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:14:31.468823 1501462 ubuntu.go:177] setting up certificates
	I0916 11:14:31.468834 1501462 provision.go:84] configureAuth start
	I0916 11:14:31.468900 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m03
	I0916 11:14:31.485639 1501462 provision.go:143] copyHostCerts
	I0916 11:14:31.485686 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:14:31.485720 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:14:31.485733 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:14:31.485833 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:14:31.485921 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:14:31.485945 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:14:31.485952 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:14:31.485981 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:14:31.486029 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:14:31.486049 1501462 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:14:31.486057 1501462 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:14:31.486081 1501462 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:14:31.486132 1501462 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612-m03 san=[127.0.0.1 192.168.67.4 localhost minikube multinode-654612-m03]
	I0916 11:14:31.991226 1501462 provision.go:177] copyRemoteCerts
	I0916 11:14:31.991302 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:14:31.991359 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.012956 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34768 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m03/id_rsa Username:docker}
	I0916 11:14:32.110161 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:14:32.110229 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:14:32.136729 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:14:32.136811 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:14:32.165705 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:14:32.165778 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:14:32.192109 1501462 provision.go:87] duration metric: took 723.259195ms to configureAuth
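
The scp steps above can be reproduced with an SSH session whose stdin feeds `sudo tee`, which sidesteps needing root on the remote target path. A library-style sketch under those assumptions (error handling trimmed; `provision` is an arbitrary package name, not minikube's):

    package provision

    import (
        "bytes"
        "os"

        "golang.org/x/crypto/ssh"
    )

    // copyFile streams a local file to a remote path via "sudo tee",
    // one simple way to do what the cert-copy steps above do.
    func copyFile(client *ssh.Client, local, remote string) error {
        data, err := os.ReadFile(local)
        if err != nil {
            return err
        }
        sess, err := client.NewSession()
        if err != nil {
            return err
        }
        defer sess.Close()
        sess.Stdin = bytes.NewReader(data)
        return sess.Run("sudo tee " + remote + " >/dev/null")
    }
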
	I0916 11:14:32.192138 1501462 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:14:32.192368 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:32.192475 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.215894 1501462 main.go:141] libmachine: Using SSH client type: native
	I0916 11:14:32.216156 1501462 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34768 <nil> <nil>}
	I0916 11:14:32.216177 1501462 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:14:32.494257 1501462 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:14:32.494349 1501462 machine.go:96] duration metric: took 4.516103882s to provisionDockerMachine
	I0916 11:14:32.494378 1501462 start.go:293] postStartSetup for "multinode-654612-m03" (driver="docker")
	I0916 11:14:32.494420 1501462 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:14:32.494507 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:14:32.494593 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.515917 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34768 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m03/id_rsa Username:docker}
	I0916 11:14:32.617939 1501462 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:14:32.621237 1501462 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:14:32.621262 1501462 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:14:32.621270 1501462 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:14:32.621284 1501462 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:14:32.621304 1501462 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:14:32.621312 1501462 command_runner.go:130] > ID=ubuntu
	I0916 11:14:32.621317 1501462 command_runner.go:130] > ID_LIKE=debian
	I0916 11:14:32.621325 1501462 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:14:32.621330 1501462 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:14:32.621337 1501462 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:14:32.621368 1501462 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:14:32.621375 1501462 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:14:32.621448 1501462 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:14:32.621482 1501462 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:14:32.621498 1501462 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:14:32.621506 1501462 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:14:32.621522 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:14:32.621581 1501462 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:14:32.621675 1501462 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:14:32.621692 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:14:32.621819 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:14:32.630519 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:14:32.655331 1501462 start.go:296] duration metric: took 160.907817ms for postStartSetup
	I0916 11:14:32.655416 1501462 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:14:32.655474 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.674349 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34768 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m03/id_rsa Username:docker}
	I0916 11:14:32.770019 1501462 command_runner.go:130] > 13%
	I0916 11:14:32.770163 1501462 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:14:32.777122 1501462 command_runner.go:130] > 170G
	I0916 11:14:32.777395 1501462 fix.go:56] duration metric: took 5.169606687s for fixHost
	I0916 11:14:32.777415 1501462 start.go:83] releasing machines lock for "multinode-654612-m03", held for 5.169655712s
	I0916 11:14:32.777524 1501462 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m03
	I0916 11:14:32.801183 1501462 out.go:177] * Found network options:
	I0916 11:14:32.805683 1501462 out.go:177]   - NO_PROXY=192.168.67.2,192.168.67.3
	W0916 11:14:32.812913 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:14:32.812945 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:14:32.812970 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:14:32.812980 1501462 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:14:32.813058 1501462 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:14:32.813121 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.813139 1501462 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:14:32.813208 1501462 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m03
	I0916 11:14:32.840408 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34768 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m03/id_rsa Username:docker}
	I0916 11:14:32.861999 1501462 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34768 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m03/id_rsa Username:docker}
	I0916 11:14:33.103767 1501462 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:14:33.104093 1501462 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:14:33.114588 1501462 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 11:14:33.114615 1501462 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:14:33.114622 1501462 command_runner.go:130] > Device: e8h/232d	Inode: 1570512     Links: 1
	I0916 11:14:33.114629 1501462 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:14:33.114636 1501462 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:14:33.114641 1501462 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:14:33.114646 1501462 command_runner.go:130] > Change: 2024-09-16 11:11:45.284395539 +0000
	I0916 11:14:33.114651 1501462 command_runner.go:130] >  Birth: 2024-09-16 11:11:45.280395637 +0000
	I0916 11:14:33.114723 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:14:33.125070 1501462 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:14:33.125177 1501462 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:14:33.134919 1501462 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:14:33.134942 1501462 start.go:495] detecting cgroup driver to use...
	I0916 11:14:33.134976 1501462 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:14:33.135035 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:14:33.148085 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:14:33.160110 1501462 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:14:33.160183 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:14:33.174714 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:14:33.187546 1501462 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:14:33.281830 1501462 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:14:33.385168 1501462 docker.go:233] disabling docker service ...
	I0916 11:14:33.385248 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:14:33.398141 1501462 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:14:33.411313 1501462 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:14:33.510901 1501462 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:14:33.592550 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:14:33.606175 1501462 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:14:33.621707 1501462 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:14:33.623178 1501462 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:14:33.623246 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.634396 1501462 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:14:33.634488 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.647708 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.658051 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.670229 1501462 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:14:33.680168 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.690576 1501462 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.700320 1501462 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:14:33.711050 1501462 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:14:33.720286 1501462 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:14:33.721488 1501462 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:14:33.730444 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:33.826894 1501462 ssh_runner.go:195] Run: sudo systemctl restart crio
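
The CRI-O reconfiguration above is a fixed list of shell edits applied in order, aborting on the first failure, followed by a daemon-reload and a restart. The sketch below replays the core sed and systemctl steps (command strings copied verbatim from the log) through a plain ssh invocation; the host string and port are placeholders:

    package main

    import (
        "fmt"
        "os/exec"
    )

    func main() {
        host := "docker@127.0.0.1" // placeholder
        steps := []string{
            `sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf`,
            `sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf`,
            `sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf`,
            `sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf`,
            `sudo systemctl daemon-reload`,
            `sudo systemctl restart crio`,
        }
        for _, s := range steps {
            // Stop the sequence on the first failing step, as minikube does.
            if out, err := exec.Command("ssh", "-p", "34768", host, s).CombinedOutput(); err != nil {
                fmt.Printf("step failed: %s\n%s\n", s, out)
                return
            }
        }
        fmt.Println("crio reconfigured and restarted")
    }
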
	I0916 11:14:33.944323 1501462 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:14:33.944447 1501462 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:14:33.949916 1501462 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:14:33.949995 1501462 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:14:33.950017 1501462 command_runner.go:130] > Device: f1h/241d	Inode: 181         Links: 1
	I0916 11:14:33.950042 1501462 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:14:33.950069 1501462 command_runner.go:130] > Access: 2024-09-16 11:14:33.940264359 +0000
	I0916 11:14:33.950099 1501462 command_runner.go:130] > Modify: 2024-09-16 11:14:33.928264654 +0000
	I0916 11:14:33.950120 1501462 command_runner.go:130] > Change: 2024-09-16 11:14:33.928264654 +0000
	I0916 11:14:33.950147 1501462 command_runner.go:130] >  Birth: -
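
Both 60s waits here (first for the socket path, then for crictl) follow the same stat-and-retry shape. A local analogue of that wait; the 500 ms retry interval is chosen for illustration, matching the polling cadence seen elsewhere in the log rather than a documented constant:

    package main

    import (
        "fmt"
        "os"
        "time"
    )

    // waitForSocket polls for a path to exist, the local analogue of the
    // "Will wait 60s for socket path" step above.
    func waitForSocket(path string, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            if _, err := os.Stat(path); err == nil {
                return nil
            }
            time.Sleep(500 * time.Millisecond)
        }
        return fmt.Errorf("%s did not appear within %v", path, timeout)
    }

    func main() {
        fmt.Println(waitForSocket("/var/run/crio/crio.sock", 60*time.Second))
    }
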
	I0916 11:14:33.950176 1501462 start.go:563] Will wait 60s for crictl version
	I0916 11:14:33.950277 1501462 ssh_runner.go:195] Run: which crictl
	I0916 11:14:33.955895 1501462 command_runner.go:130] > /usr/bin/crictl
	I0916 11:14:33.956008 1501462 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:14:33.998262 1501462 command_runner.go:130] > Version:  0.1.0
	I0916 11:14:33.998330 1501462 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:14:33.998351 1501462 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:14:33.998363 1501462 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:14:33.998381 1501462 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:14:33.998486 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:14:34.048403 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:14:34.048481 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:14:34.048502 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:14:34.048518 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:14:34.048555 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:14:34.048581 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:14:34.048601 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:14:34.048621 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:14:34.048641 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:14:34.048671 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:14:34.048701 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:14:34.048723 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:14:34.050884 1501462 ssh_runner.go:195] Run: crio --version
	I0916 11:14:34.094347 1501462 command_runner.go:130] > crio version 1.24.6
	I0916 11:14:34.094422 1501462 command_runner.go:130] > Version:          1.24.6
	I0916 11:14:34.094444 1501462 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:14:34.094461 1501462 command_runner.go:130] > GitTreeState:     clean
	I0916 11:14:34.094512 1501462 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:14:34.094549 1501462 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:14:34.094569 1501462 command_runner.go:130] > Compiler:         gc
	I0916 11:14:34.094587 1501462 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:14:34.094619 1501462 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:14:34.094646 1501462 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:14:34.094664 1501462 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:14:34.094682 1501462 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:14:34.099329 1501462 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:14:34.101834 1501462 out.go:177]   - env NO_PROXY=192.168.67.2
	I0916 11:14:34.104515 1501462 out.go:177]   - env NO_PROXY=192.168.67.2,192.168.67.3
	I0916 11:14:34.107225 1501462 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:14:34.123771 1501462 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:14:34.127732 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
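The /etc/hosts rewrite above is idempotent: grep -v drops any stale host.minikube.internal line, the fresh tab-separated mapping is appended, and the temp file is copied back with sudo. The same pattern recurs below for control-plane.minikube.internal. A sketch, assuming a local bash (the helper name is illustrative):

package main

import (
	"fmt"
	"os/exec"
)

// setHostsEntry drops any existing line for name and appends ip<TAB>name,
// staging the result in a PID-keyed temp file before copying over /etc/hosts.
func setHostsEntry(ip, name string) error {
	cmd := fmt.Sprintf("{ grep -v $'\\t%[2]s$' \"/etc/hosts\"; echo \"%[1]s\t%[2]s\"; } > /tmp/h.$$; sudo cp /tmp/h.$$ \"/etc/hosts\"", ip, name)
	out, err := exec.Command("/bin/bash", "-c", cmd).CombinedOutput()
	if err != nil {
		return fmt.Errorf("hosts update failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	fmt.Println(setHostsEntry("192.168.67.1", "host.minikube.internal"))
}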
	I0916 11:14:34.139206 1501462 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:14:34.139469 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:34.139766 1501462 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:14:34.157232 1501462 host.go:66] Checking if "multinode-654612" exists ...
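Before generating certificates for the new node, mustload re-reads the profile and confirms the primary container is still up via docker container inspect with a Go template. A sketch of that probe (the helper name is illustrative):

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// containerState returns docker's State.Status (e.g. "running") for a named
// container, the same check cli_runner issues above.
func containerState(name string) (string, error) {
	out, err := exec.Command("docker", "container", "inspect", name,
		"--format", "{{.State.Status}}").Output()
	return strings.TrimSpace(string(out)), err
}

func main() {
	fmt.Println(containerState("multinode-654612"))
}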
	I0916 11:14:34.157531 1501462 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.4
	I0916 11:14:34.157556 1501462 certs.go:194] generating shared ca certs ...
	I0916 11:14:34.157572 1501462 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:14:34.157703 1501462 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:14:34.157749 1501462 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:14:34.157764 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:14:34.157777 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:14:34.157793 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:14:34.157811 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:14:34.157880 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:14:34.157913 1501462 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:14:34.157926 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:14:34.157951 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:14:34.157985 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:14:34.158012 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:14:34.158061 1501462 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:14:34.158094 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:34.158112 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:14:34.158131 1501462 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:14:34.158152 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:14:34.185434 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:14:34.211199 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:14:34.237349 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:14:34.263715 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:14:34.290676 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:14:34.316364 1501462 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:14:34.343789 1501462 ssh_runner.go:195] Run: openssl version
	I0916 11:14:34.349193 1501462 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:14:34.349655 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:14:34.359777 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:14:34.363746 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:14:34.363793 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:14:34.363893 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:14:34.370741 1501462 command_runner.go:130] > 51391683
	I0916 11:14:34.371374 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:14:34.385329 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:14:34.395318 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:14:34.398835 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:14:34.399185 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:14:34.399280 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:14:34.407733 1501462 command_runner.go:130] > 3ec20f2e
	I0916 11:14:34.407942 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:14:34.417682 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:14:34.428089 1501462 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:34.432264 1501462 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:34.432310 1501462 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:34.432367 1501462 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:14:34.439616 1501462 command_runner.go:130] > b5213941
	I0916 11:14:34.440268 1501462 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
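Each CA lands twice: the PEM is copied under /usr/share/ca-certificates, then linked into /etc/ssl/certs under its OpenSSL subject hash with a .0 suffix (the 51391683, 3ec20f2e and b5213941 values above), which is the directory layout OpenSSL's hash-based CA lookup expects and what c_rehash automates. A sketch of the hash-and-link step (the helper name is illustrative):

package main

import (
	"fmt"
	"os/exec"
	"path/filepath"
	"strings"
)

// installCA links a PEM certificate into /etc/ssl/certs under its OpenSSL
// subject hash, e.g. 51391683 -> /etc/ssl/certs/51391683.0, so hash-based
// CA directory lookups can find it.
func installCA(pemPath string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pemPath).Output()
	if err != nil {
		return err
	}
	link := filepath.Join("/etc/ssl/certs", strings.TrimSpace(string(out))+".0")
	cmd := fmt.Sprintf("test -L %s || ln -fs %s %s", link, pemPath, link)
	return exec.Command("sudo", "/bin/bash", "-c", cmd).Run()
}

func main() {
	fmt.Println(installCA("/usr/share/ca-certificates/minikubeCA.pem"))
}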
	I0916 11:14:34.450060 1501462 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:14:34.453826 1501462 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:14:34.453888 1501462 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:14:34.453925 1501462 kubeadm.go:934] updating node {m03 192.168.67.4 0 v1.31.1  false true} ...
	I0916 11:14:34.454019 1501462 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
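The drop-in above clears any inherited ExecStart, then launches the versioned kubelet with QoS cgroups and node-allocatable enforcement disabled and the node's name and IP pinned via --hostname-override and --node-ip. A sketch of rendering such a unit with text/template; the template variable and struct fields are illustrative, not minikube's actual code:

package main

import (
	"os"
	"text/template"
)

// kubeletUnit is an illustrative template for the drop-in logged above;
// minikube renders an equivalent file and ships it as 10-kubeadm.conf.
var kubeletUnit = template.Must(template.New("kubelet").Parse(`[Unit]
Wants=crio.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override={{.Hostname}} --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip={{.NodeIP}}

[Install]
`))

func main() {
	data := struct{ Version, Hostname, NodeIP string }{"v1.31.1", "multinode-654612-m03", "192.168.67.4"}
	if err := kubeletUnit.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}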
	I0916 11:14:34.454089 1501462 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:14:34.464273 1501462 command_runner.go:130] > kubeadm
	I0916 11:14:34.464299 1501462 command_runner.go:130] > kubectl
	I0916 11:14:34.464304 1501462 command_runner.go:130] > kubelet
	I0916 11:14:34.465510 1501462 binaries.go:44] Found k8s binaries, skipping transfer
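The transfer is skipped only because kubeadm, kubectl and kubelet all already sit under the versioned directory. A sketch of that check (the helper name is illustrative):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// haveK8sBinaries reports whether kubeadm, kubectl and kubelet all exist
// under the versioned binaries directory, mirroring the skip decision above.
func haveK8sBinaries(version string) bool {
	dir := filepath.Join("/var/lib/minikube/binaries", version)
	for _, b := range []string{"kubeadm", "kubectl", "kubelet"} {
		if _, err := os.Stat(filepath.Join(dir, b)); err != nil {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(haveK8sBinaries("v1.31.1"))
}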
	I0916 11:14:34.465581 1501462 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:14:34.475058 1501462 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (370 bytes)
	I0916 11:14:34.496975 1501462 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:14:34.517299 1501462 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:14:34.521197 1501462 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:14:34.532933 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:34.627160 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:14:34.640422 1501462 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.67.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 11:14:34.640877 1501462 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:14:34.645823 1501462 out.go:177] * Verifying Kubernetes components...
	I0916 11:14:34.648904 1501462 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:14:34.756044 1501462 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:14:34.769035 1501462 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:14:34.769376 1501462 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:14:34.769687 1501462 node_ready.go:35] waiting up to 6m0s for node "multinode-654612-m03" to be "Ready" ...
	I0916 11:14:34.769795 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:34.769835 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:34.769859 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:34.769879 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:34.772353 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:34.772416 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:34.772439 1501462 round_trippers.go:580]     Audit-Id: 2d6145d2-d1f9-4ec2-9c70-eb4d5066ff83
	I0916 11:14:34.772459 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:34.772477 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:34.772496 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:34.772514 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:34.772532 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:34 GMT
	I0916 11:14:34.773580 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:35.270033 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:35.270062 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:35.270071 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:35.270075 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:35.272529 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:35.272554 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:35.272562 1501462 round_trippers.go:580]     Audit-Id: e29b8163-cb55-4463-b9d9-dce470a82e10
	I0916 11:14:35.272567 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:35.272599 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:35.272610 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:35.272615 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:35.272621 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:35 GMT
	I0916 11:14:35.272882 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:35.770726 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:35.770756 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:35.770767 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:35.770771 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:35.773344 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:35.773416 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:35.773439 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:35.773452 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:35.773458 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:35 GMT
	I0916 11:14:35.773463 1501462 round_trippers.go:580]     Audit-Id: b381df50-c4e5-46c9-9d5a-063df1ae0492
	I0916 11:14:35.773466 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:35.773470 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:35.773622 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:36.269967 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:36.269993 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:36.270004 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:36.270007 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:36.272263 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:36.272285 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:36.272296 1501462 round_trippers.go:580]     Audit-Id: e5452d15-d80c-4eb0-8c88-78c7dec8f044
	I0916 11:14:36.272301 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:36.272305 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:36.272332 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:36.272340 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:36.272343 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:36 GMT
	I0916 11:14:36.272826 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:36.770599 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:36.770626 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:36.770640 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:36.770648 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:36.772972 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:36.772995 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:36.773004 1501462 round_trippers.go:580]     Audit-Id: 68e31f03-5da4-4a5d-bcf6-55b651b307f6
	I0916 11:14:36.773008 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:36.773010 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:36.773013 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:36.773016 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:36.773019 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:36 GMT
	I0916 11:14:36.773358 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:36.773797 1501462 node_ready.go:53] node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:14:37.270536 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:37.270602 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:37.270623 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:37.270628 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:37.273451 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:37.273520 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:37.273540 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:37 GMT
	I0916 11:14:37.273573 1501462 round_trippers.go:580]     Audit-Id: 1bbf7170-0dce-4e17-9b0b-f44ba3c4ec57
	I0916 11:14:37.273592 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:37.273635 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:37.273674 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:37.273705 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:37.273886 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:37.770008 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:37.770080 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:37.770114 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:37.770131 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:37.774099 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:37.774173 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:37.774197 1501462 round_trippers.go:580]     Audit-Id: dbf0372a-e007-4394-a4da-a5081b4a7b99
	I0916 11:14:37.774217 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:37.774250 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:37.774273 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:37.774290 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:37.774310 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:37 GMT
	I0916 11:14:37.774945 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:38.269973 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:38.270001 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:38.270011 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:38.270015 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:38.272177 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:38.272256 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:38.272278 1501462 round_trippers.go:580]     Audit-Id: 2ac4d389-3512-49fe-bcc8-1b298a217141
	I0916 11:14:38.272296 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:38.272327 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:38.272352 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:38.272368 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:38.272388 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:38 GMT
	I0916 11:14:38.272533 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:38.770444 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:38.770472 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:38.770482 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:38.770488 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:38.773170 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:38.773199 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:38.773207 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:38.773218 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:38.773222 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:38.773226 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:38 GMT
	I0916 11:14:38.773229 1501462 round_trippers.go:580]     Audit-Id: 7d8d0340-4533-4e7c-9ece-2bba0c937c5c
	I0916 11:14:38.773232 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:38.773358 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:38.773818 1501462 node_ready.go:53] node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:14:39.269963 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:39.269992 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:39.270003 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:39.270007 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:39.272226 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:39.272291 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:39.272307 1501462 round_trippers.go:580]     Audit-Id: 21b214e6-742f-4b13-974e-2e3b47592f46
	I0916 11:14:39.272312 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:39.272318 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:39.272321 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:39.272324 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:39.272327 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:39 GMT
	I0916 11:14:39.272430 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:39.769886 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:39.769912 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:39.769922 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:39.769928 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:39.772092 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:39.772116 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:39.772125 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:39.772130 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:39.772135 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:39 GMT
	I0916 11:14:39.772138 1501462 round_trippers.go:580]     Audit-Id: 455576a3-4dfe-4f44-8226-d14e12b3a714
	I0916 11:14:39.772140 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:39.772145 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:39.772360 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:40.270568 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:40.270596 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:40.270605 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:40.270609 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:40.273026 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:40.273102 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:40.273124 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:40 GMT
	I0916 11:14:40.273145 1501462 round_trippers.go:580]     Audit-Id: f2adb3c6-e4b9-4292-b308-2e9207ae7c1a
	I0916 11:14:40.273179 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:40.273189 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:40.273193 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:40.273196 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:40.273329 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:40.770805 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:40.770831 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:40.770840 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:40.770845 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:40.773098 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:40.773171 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:40.773194 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:40.773211 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:40.773229 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:40 GMT
	I0916 11:14:40.773257 1501462 round_trippers.go:580]     Audit-Id: 9f9cccd4-64d6-4af3-8259-3d9cc8f3af28
	I0916 11:14:40.773275 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:40.773294 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:40.773724 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:40.774197 1501462 node_ready.go:53] node "multinode-654612-m03" has status "Ready":"Unknown"
	I0916 11:14:41.269994 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:41.270019 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.270029 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.270033 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.272379 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.272419 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.272428 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.272434 1501462 round_trippers.go:580]     Audit-Id: 1dff8b8b-65b1-4ab5-ab0e-451e37d2eb53
	I0916 11:14:41.272437 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.272440 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.272444 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.272447 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.272636 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"839","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6573 chars]
	I0916 11:14:41.770943 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:41.770969 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.770980 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.770984 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.773223 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.773250 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.773259 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.773265 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.773268 1501462 round_trippers.go:580]     Audit-Id: cf352742-ea65-4c7f-a6ca-0a5af95e43f8
	I0916 11:14:41.773275 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.773278 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.773281 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.773507 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"930","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6178 chars]
	I0916 11:14:41.773934 1501462 node_ready.go:49] node "multinode-654612-m03" has status "Ready":"True"
	I0916 11:14:41.773953 1501462 node_ready.go:38] duration metric: took 7.004226336s for node "multinode-654612-m03" to be "Ready" ...
	I0916 11:14:41.773963 1501462 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:14:41.774036 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:14:41.774047 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.774055 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.774058 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.777630 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:41.777656 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.777665 1501462 round_trippers.go:580]     Audit-Id: 542f66de-a27e-4479-907d-0c91dabd50a5
	I0916 11:14:41.777669 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.777672 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.777677 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.777680 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.777683 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.778568 1501462 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"930"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91168 chars]
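With the node Ready, the wait widens to the system-critical pods listed at 11:14:41.773963: every pod matching each of the six label selectors must report Ready. A sketch of one pass of that check, again using client-go (helper names are illustrative):

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// systemPodsReady lists kube-system pods per selector and requires every
// matching pod to carry Ready=True, like the pod_ready.go wait above.
func systemPodsReady(ctx context.Context, cs *kubernetes.Clientset) (bool, error) {
	selectors := []string{
		"k8s-app=kube-dns", "component=etcd", "component=kube-apiserver",
		"component=kube-controller-manager", "k8s-app=kube-proxy",
		"component=kube-scheduler",
	}
	for _, sel := range selectors {
		pods, err := cs.CoreV1().Pods("kube-system").List(ctx, metav1.ListOptions{LabelSelector: sel})
		if err != nil {
			return false, err
		}
		for _, p := range pods.Items {
			ready := false
			for _, c := range p.Status.Conditions {
				if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
					ready = true
				}
			}
			if !ready {
				return false, nil
			}
		}
	}
	return true, nil
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	fmt.Println(systemPodsReady(context.Background(), cs))
}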
	I0916 11:14:41.782527 1501462 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.782653 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:14:41.782665 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.782674 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.782681 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.785269 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.785334 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.785375 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.785402 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.785415 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.785421 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.785425 1501462 round_trippers.go:580]     Audit-Id: 7d8ba4a5-ef0b-4ddc-8bd2-f197a8088f2e
	I0916 11:14:41.785428 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.785564 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:14:41.786181 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:41.786198 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.786208 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.786213 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.788712 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.788737 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.788747 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.788751 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.788755 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.788759 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.788762 1501462 round_trippers.go:580]     Audit-Id: 2c98468a-798d-4499-8492-839ac89f9500
	I0916 11:14:41.788765 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.789163 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:41.789624 1501462 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:41.789651 1501462 pod_ready.go:82] duration metric: took 7.076093ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
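
The pod_ready checks above follow a fixed pattern: GET the pod, inspect its status, and retry until the 6m0s budget expires. A minimal client-go sketch of that pattern (an illustrative helper under assumed names, not minikube's actual pod_ready.go):

// Illustrative sketch only, not minikube's pod_ready.go: poll a pod's
// Ready condition with client-go until it is True or the budget expires.
package podready

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

// waitPodReady polls every 500ms for up to 6 minutes, matching the
// "waiting up to 6m0s" budget and the ~500ms retry cadence in the log.
func waitPodReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string) error {
	return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			for _, cond := range pod.Status.Conditions {
				if cond.Type == corev1.PodReady {
					return cond.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
}
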
	I0916 11:14:41.789665 1501462 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.789732 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:14:41.789743 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.789751 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.789756 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.791918 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.792006 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.792021 1501462 round_trippers.go:580]     Audit-Id: cac6223d-fe35-4ec7-b8b2-9a2af042de68
	I0916 11:14:41.792027 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.792030 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.792034 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.792053 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.792068 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.792205 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"760","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6575 chars]
	I0916 11:14:41.792751 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:41.792771 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.792780 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.792789 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.794894 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.794953 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.794975 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.794994 1501462 round_trippers.go:580]     Audit-Id: 3c541ed2-baca-4cb1-b148-d104f33a988c
	I0916 11:14:41.795011 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.795037 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.795060 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.795077 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.795212 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:41.795657 1501462 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:41.795679 1501462 pod_ready.go:82] duration metric: took 6.006343ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
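
Each pod check above appears to be paired with a GET of the node hosting it (the /api/v1/nodes/multinode-654612 requests). A hedged sketch of such a node-side readiness check (nodeReady is a hypothetical helper, assuming a configured clientset):

// Sketch of the node-side half of the check pattern seen in the log.
// nodeReady is an illustrative helper, not minikube code.
package nodeready

import (
	"context"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

func nodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) (bool, error) {
	node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
	if err != nil {
		return false, err
	}
	// The NodeReady condition is what "Ready"/"NotReady" in kubectl output
	// reflects; anything other than ConditionTrue counts as not ready here.
	for _, cond := range node.Status.Conditions {
		if cond.Type == corev1.NodeReady {
			return cond.Status == corev1.ConditionTrue, nil
		}
	}
	return false, nil
}
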
	I0916 11:14:41.795700 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.795772 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:14:41.795783 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.795792 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.795798 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.798321 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.798380 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.798389 1501462 round_trippers.go:580]     Audit-Id: 79fa29df-3d1f-4dbb-8a34-2a73a820f0f9
	I0916 11:14:41.798398 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.798401 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.798405 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.798408 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.798411 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.798557 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"753","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 9107 chars]
	I0916 11:14:41.799181 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:41.799198 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.799206 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.799211 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.801308 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.801336 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.801345 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.801349 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.801353 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.801356 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.801363 1501462 round_trippers.go:580]     Audit-Id: 504991f4-be4f-473f-9d6e-bf767a41471c
	I0916 11:14:41.801366 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.801729 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:41.802173 1501462 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:41.802194 1501462 pod_ready.go:82] duration metric: took 6.481318ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.802206 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.802275 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:14:41.802285 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.802293 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.802305 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.804706 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.804778 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.804802 1501462 round_trippers.go:580]     Audit-Id: 097b9888-987f-4a77-86e7-889f5c29786a
	I0916 11:14:41.804821 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.804858 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.804884 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.804904 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.804935 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.805152 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"761","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8897 chars]
	I0916 11:14:41.805768 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:41.805786 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.805795 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.805799 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.808025 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.808053 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.808063 1501462 round_trippers.go:580]     Audit-Id: deed8325-469c-4a95-a4d9-75c5d6dde997
	I0916 11:14:41.808077 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.808081 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.808084 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.808087 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.808091 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.808480 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:41.808931 1501462 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:41.808953 1501462 pod_ready.go:82] duration metric: took 6.734997ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.808965 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:41.971408 1501462 request.go:632] Waited for 162.347034ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:41.971474 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:14:41.971481 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:41.971490 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:41.971500 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:41.973961 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:41.973987 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:41.973996 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:41 GMT
	I0916 11:14:41.974000 1501462 round_trippers.go:580]     Audit-Id: d3651905-9153-46c0-8506-7c3ddded113b
	I0916 11:14:41.974005 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:41.974008 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:41.974012 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:41.974017 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:41.974167 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"877","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:14:42.171882 1501462 request.go:632] Waited for 197.162695ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:42.171955 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:14:42.171964 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:42.171975 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:42.171980 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:42.174872 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:42.174906 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:42.174923 1501462 round_trippers.go:580]     Audit-Id: bf637934-8a0b-4637-87b5-c0b96397ee4c
	I0916 11:14:42.174928 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:42.174931 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:42.174934 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:42.174937 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:42.174940 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:42 GMT
	I0916 11:14:42.175101 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:14:42.175556 1501462 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:42.175574 1501462 pod_ready.go:82] duration metric: took 366.598662ms for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
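
The "Waited for ... due to client-side throttling, not priority and fairness" lines come from client-go's client-side rate limiter, which defaults to 5 requests/sec with a burst of 10; a tight loop of pod and node GETs like the one above exceeds the burst and is delayed locally before the requests ever reach the API server. A sketch of where those knobs live (the QPS/Burst values here are illustrative, not what minikube configures):

// Sketch: configuring client-go's client-side rate limiter. The defaults
// (QPS 5, Burst 10) are what produce the throttling waits logged above.
package throttling

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func newClient(kubeconfig string) (*kubernetes.Clientset, error) {
	cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
	if err != nil {
		return nil, err
	}
	cfg.QPS = 50    // illustrative; default is 5 requests/sec
	cfg.Burst = 100 // illustrative; default burst is 10
	return kubernetes.NewForConfig(cfg)
}
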
	I0916 11:14:42.175588 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:42.372080 1501462 request.go:632] Waited for 196.397578ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:14:42.372157 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:14:42.372168 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:42.372184 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:42.372192 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:42.374794 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:42.374822 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:42.374832 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:42.374838 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:42.374842 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:42 GMT
	I0916 11:14:42.374846 1501462 round_trippers.go:580]     Audit-Id: 7944771f-1bc0-4ceb-82ce-b762d64014e5
	I0916 11:14:42.374851 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:42.374854 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:42.375198 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"681","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:14:42.571106 1501462 request.go:632] Waited for 195.274463ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:42.571167 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:42.571173 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:42.571182 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:42.571190 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:42.573574 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:42.573649 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:42.573686 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:42.573712 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:42.573732 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:42.573763 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:42 GMT
	I0916 11:14:42.573786 1501462 round_trippers.go:580]     Audit-Id: 0a9d84f9-bf72-480a-820b-ae4b37380bff
	I0916 11:14:42.573805 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:42.573977 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:42.574402 1501462 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:42.574421 1501462 pod_ready.go:82] duration metric: took 398.825546ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:42.574432 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:42.771220 1501462 request.go:632] Waited for 196.717545ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:42.771302 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:42.771312 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:42.771319 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:42.771330 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:42.773826 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:42.773936 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:42.773961 1501462 round_trippers.go:580]     Audit-Id: ee9c5506-462d-41ec-85a9-f3270692c7bf
	I0916 11:14:42.773981 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:42.774016 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:42.774027 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:42.774031 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:42.774035 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:42 GMT
	I0916 11:14:42.774174 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:42.971063 1501462 request.go:632] Waited for 196.219974ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:42.971165 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:42.971176 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:42.971192 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:42.971196 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:42.973907 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:42.973991 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:42.974021 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:42 GMT
	I0916 11:14:42.974054 1501462 round_trippers.go:580]     Audit-Id: b0d68488-fb88-444d-a11d-60291406c705
	I0916 11:14:42.974090 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:42.974141 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:42.974154 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:42.974171 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:42.974307 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"930","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6178 chars]
	I0916 11:14:43.171636 1501462 request.go:632] Waited for 96.288629ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:43.171700 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:43.171705 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:43.171715 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:43.171723 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:43.174124 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:43.174154 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:43.174164 1501462 round_trippers.go:580]     Audit-Id: f0d109d0-4c7e-43a8-ad4f-290694c354aa
	I0916 11:14:43.174168 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:43.174173 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:43.174177 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:43.174180 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:43.174183 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:43 GMT
	I0916 11:14:43.174421 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:43.371343 1501462 request.go:632] Waited for 196.341349ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:43.371444 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:43.371454 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:43.371464 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:43.371468 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:43.374435 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:43.374616 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:43.374641 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:43.374646 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:43 GMT
	I0916 11:14:43.374650 1501462 round_trippers.go:580]     Audit-Id: 85c074da-ba05-4ad9-a513-888cf0bfa58c
	I0916 11:14:43.374654 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:43.374658 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:43.374661 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:43.375109 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"930","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6178 chars]
	I0916 11:14:43.574917 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:43.574946 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:43.574956 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:43.574961 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:43.577267 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:43.577301 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:43.577310 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:43 GMT
	I0916 11:14:43.577314 1501462 round_trippers.go:580]     Audit-Id: 7d8fe009-1554-47a9-8ba8-3300a671bdcb
	I0916 11:14:43.577318 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:43.577322 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:43.577324 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:43.577327 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:43.577450 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:43.771145 1501462 request.go:632] Waited for 193.173755ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:43.771225 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:43.771232 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:43.771241 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:43.771245 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:43.773580 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:43.773616 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:43.773625 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:43.773631 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:43.773684 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:43 GMT
	I0916 11:14:43.773694 1501462 round_trippers.go:580]     Audit-Id: 94d4a05e-5297-44b8-acca-3b8b6b154dbb
	I0916 11:14:43.773697 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:43.773700 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:43.773844 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"930","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6178 chars]
	I0916 11:14:44.075445 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:44.075466 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:44.075476 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:44.075482 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:44.078351 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:44.078431 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:44.078457 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:44.078474 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:44.078509 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:44.078519 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:44.078524 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:44 GMT
	I0916 11:14:44.078527 1501462 round_trippers.go:580]     Audit-Id: 2a94c8c4-2b04-40c5-901e-acc4a25f514a
	I0916 11:14:44.078654 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:44.171530 1501462 request.go:632] Waited for 92.269626ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:44.171616 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:44.171623 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:44.171633 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:44.171638 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:44.173912 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:44.173937 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:44.173945 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:44.173951 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:44 GMT
	I0916 11:14:44.173954 1501462 round_trippers.go:580]     Audit-Id: 5804e932-f2fc-40fa-a33f-752696db3a76
	I0916 11:14:44.173964 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:44.173967 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:44.173970 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:44.175091 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"930","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6178 chars]
	I0916 11:14:44.574737 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:44.574766 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:44.574777 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:44.574782 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:44.577315 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:44.577389 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:44.577411 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:44.577428 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:44.577461 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:44 GMT
	I0916 11:14:44.577471 1501462 round_trippers.go:580]     Audit-Id: e002f336-b291-464e-ba72-91f96f901268
	I0916 11:14:44.577475 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:44.577478 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:44.577608 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:44.578153 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:44.578169 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:44.578178 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:44.578185 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:44.580317 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:44.580344 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:44.580354 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:44.580358 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:44.580361 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:44.580364 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:44.580367 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:44 GMT
	I0916 11:14:44.580370 1501462 round_trippers.go:580]     Audit-Id: b5dd5772-1e51-4051-ad1d-3883f60b87a4
	I0916 11:14:44.580604 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:44.581035 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
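
At this point the poller observes kube-proxy-vf648 not yet Ready (note the node resourceVersion bump from 930 to 935 just above) and re-polls at roughly 500ms intervals. A hedged sketch of surfacing why a pod is stuck (podNotReadyReason is a hypothetical helper, not part of minikube):

// Sketch: extract the reason a pod reports Ready != True, for diagnosis.
// podNotReadyReason is an illustrative helper, not minikube code.
package diagnose

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func podNotReadyReason(pod *corev1.Pod) string {
	for _, cond := range pod.Status.Conditions {
		if cond.Type == corev1.PodReady && cond.Status != corev1.ConditionTrue {
			return fmt.Sprintf("%s: %s", cond.Reason, cond.Message)
		}
	}
	return "" // pod is Ready, or has no Ready condition yet
}
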
	I0916 11:14:45.076240 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:45.076338 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:45.076381 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:45.076425 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:45.079454 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:45.079608 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:45.079639 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:45.079683 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:45.079706 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:45 GMT
	I0916 11:14:45.079725 1501462 round_trippers.go:580]     Audit-Id: b41fdcdb-d1ad-43f0-9e59-e0da04b16180
	I0916 11:14:45.079744 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:45.079765 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:45.080588 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:45.081386 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:45.081471 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:45.081495 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:45.081534 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:45.084778 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:45.084861 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:45.084887 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:45.084908 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:45.084942 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:45.084969 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:45.084994 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:45 GMT
	I0916 11:14:45.085013 1501462 round_trippers.go:580]     Audit-Id: 3f5e67b5-8021-4148-a0b2-c191b532fc38
	I0916 11:14:45.085196 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:45.575465 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:45.575492 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:45.575502 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:45.575507 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:45.578047 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:45.578074 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:45.578083 1501462 round_trippers.go:580]     Audit-Id: 85e41073-c954-4c1e-8c74-9d2d3edd2eaf
	I0916 11:14:45.578088 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:45.578091 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:45.578094 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:45.578097 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:45.578101 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:45 GMT
	I0916 11:14:45.578247 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:45.578804 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:45.578823 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:45.578832 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:45.578837 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:45.581062 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:45.581132 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:45.581153 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:45.581172 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:45.581207 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:45.581231 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:45 GMT
	I0916 11:14:45.581250 1501462 round_trippers.go:580]     Audit-Id: 00baedb9-8d7b-4e48-8221-bfc6c75dbe08
	I0916 11:14:45.581267 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:45.581401 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:46.074686 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:46.074710 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:46.074720 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:46.074724 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:46.077811 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:46.077842 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:46.077850 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:46 GMT
	I0916 11:14:46.077892 1501462 round_trippers.go:580]     Audit-Id: 480eb449-72d8-4552-b188-a67a77d33959
	I0916 11:14:46.077908 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:46.077913 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:46.077917 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:46.077920 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:46.078380 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:46.078916 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:46.078933 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:46.078943 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:46.078947 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:46.081322 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:46.081382 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:46.081405 1501462 round_trippers.go:580]     Audit-Id: 5d20d7b2-9e7e-4817-a4c0-e5b296fc79a6
	I0916 11:14:46.081425 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:46.081458 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:46.081475 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:46.081488 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:46.081492 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:46 GMT
	I0916 11:14:46.081596 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:46.574703 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:46.574731 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:46.574741 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:46.574747 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:46.577409 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:46.577446 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:46.577457 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:46 GMT
	I0916 11:14:46.577461 1501462 round_trippers.go:580]     Audit-Id: f27c9aba-67e2-4661-9917-efa21ae66bfd
	I0916 11:14:46.577465 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:46.577467 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:46.577470 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:46.577473 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:46.577743 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:46.578325 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:46.578342 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:46.578359 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:46.578364 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:46.580767 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:46.580824 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:46.580843 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:46 GMT
	I0916 11:14:46.580863 1501462 round_trippers.go:580]     Audit-Id: 145b41b0-129f-4b92-852c-09bc7b20c1fc
	I0916 11:14:46.580881 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:46.580907 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:46.580929 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:46.580946 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:46.581079 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:46.581506 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:47.074719 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:47.074746 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:47.074757 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:47.074762 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:47.077270 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:47.077345 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:47.077365 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:47.077385 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:47.077418 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:47.077443 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:47.077462 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:47 GMT
	I0916 11:14:47.077480 1501462 round_trippers.go:580]     Audit-Id: 5a2161df-3ae3-4195-a451-4cdf801ff08c
	I0916 11:14:47.077647 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:47.078199 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:47.078220 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:47.078229 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:47.078235 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:47.080380 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:47.080404 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:47.080413 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:47.080417 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:47.080422 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:47.080425 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:47 GMT
	I0916 11:14:47.080429 1501462 round_trippers.go:580]     Audit-Id: 43adc892-10e7-433e-8489-68181233c916
	I0916 11:14:47.080432 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:47.080800 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:47.574931 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:47.574965 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:47.574979 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:47.574987 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:47.577538 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:47.577565 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:47.577576 1501462 round_trippers.go:580]     Audit-Id: 17ad7b41-675f-4e81-93ad-c64f2d179707
	I0916 11:14:47.577580 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:47.577586 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:47.577589 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:47.577595 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:47.577599 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:47 GMT
	I0916 11:14:47.577830 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:47.578426 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:47.578471 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:47.578480 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:47.578483 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:47.580526 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:47.580546 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:47.580555 1501462 round_trippers.go:580]     Audit-Id: c3b96736-82f8-4ba6-9583-c72b78311a95
	I0916 11:14:47.580558 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:47.580561 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:47.580564 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:47.580567 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:47.580570 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:47 GMT
	I0916 11:14:47.580817 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:48.075008 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:48.075034 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:48.075048 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:48.075053 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:48.078225 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:48.078342 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:48.078377 1501462 round_trippers.go:580]     Audit-Id: 830eea3a-d94e-4a5c-a97a-f9c17109f78d
	I0916 11:14:48.078412 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:48.078426 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:48.078429 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:48.078433 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:48.078436 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:48 GMT
	I0916 11:14:48.078632 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:48.079391 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:48.079412 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:48.079421 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:48.079426 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:48.082604 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:48.082628 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:48.082637 1501462 round_trippers.go:580]     Audit-Id: 64366884-b977-48b7-8e16-0c82da2573fd
	I0916 11:14:48.082641 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:48.082647 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:48.082651 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:48.082655 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:48.082657 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:48 GMT
	I0916 11:14:48.082947 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:48.574810 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:48.574837 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:48.574847 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:48.574851 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:48.577326 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:48.577348 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:48.577356 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:48.577362 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:48.577366 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:48.577369 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:48 GMT
	I0916 11:14:48.577372 1501462 round_trippers.go:580]     Audit-Id: 6e157fd7-7643-4582-aabd-d453bdcda46a
	I0916 11:14:48.577375 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:48.577717 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:48.578246 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:48.578265 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:48.578273 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:48.578277 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:48.580719 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:48.580744 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:48.580752 1501462 round_trippers.go:580]     Audit-Id: 1683d46c-cc05-474b-94a4-f2d1695acdc4
	I0916 11:14:48.580757 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:48.580761 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:48.580763 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:48.580766 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:48.580769 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:48 GMT
	I0916 11:14:48.581084 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:48.581572 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:49.074684 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:49.074714 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:49.074725 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:49.074729 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:49.077190 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:49.077216 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:49.077226 1501462 round_trippers.go:580]     Audit-Id: d9752a23-9814-4b9e-b941-4beae4af8333
	I0916 11:14:49.077230 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:49.077233 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:49.077236 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:49.077239 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:49.077242 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:49 GMT
	I0916 11:14:49.077449 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:49.078017 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:49.078034 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:49.078042 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:49.078046 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:49.080042 1501462 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:14:49.080064 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:49.080073 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:49.080077 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:49.080080 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:49 GMT
	I0916 11:14:49.080083 1501462 round_trippers.go:580]     Audit-Id: 05a2882c-9e90-4286-860e-02a10bbc2c8a
	I0916 11:14:49.080090 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:49.080093 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:49.080372 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:49.575565 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:49.575591 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:49.575613 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:49.575620 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:49.578154 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:49.578180 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:49.578190 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:49.578194 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:49.578197 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:49 GMT
	I0916 11:14:49.578200 1501462 round_trippers.go:580]     Audit-Id: 6ad549dd-5eab-40e0-b544-5234f622b519
	I0916 11:14:49.578202 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:49.578205 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:49.578482 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:49.579017 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:49.579036 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:49.579044 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:49.579050 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:49.581211 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:49.581274 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:49.581296 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:49.581315 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:49.581348 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:49.581382 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:49 GMT
	I0916 11:14:49.581401 1501462 round_trippers.go:580]     Audit-Id: 59617a19-1a25-40b1-bd14-89a3e757e092
	I0916 11:14:49.581410 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:49.581509 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:50.074655 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:50.074684 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:50.074694 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:50.074699 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:50.077795 1501462 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:14:50.077829 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:50.077845 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:50.077848 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:50.077854 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:50.077859 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:50 GMT
	I0916 11:14:50.077862 1501462 round_trippers.go:580]     Audit-Id: 2538fcb8-a648-415c-975e-a639dd679c8a
	I0916 11:14:50.077865 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:50.078382 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:50.078962 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:50.078977 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:50.078987 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:50.078992 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:50.083142 1501462 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:14:50.083166 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:50.083175 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:50.083181 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:50.083185 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:50.083189 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:50 GMT
	I0916 11:14:50.083193 1501462 round_trippers.go:580]     Audit-Id: b03f01a0-ef95-4063-93ec-36498c3a50f1
	I0916 11:14:50.083198 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:50.083306 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:50.575530 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:50.575552 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:50.575562 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:50.575567 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:50.578015 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:50.578041 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:50.578049 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:50.578054 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:50.578058 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:50.578060 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:50.578064 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:50 GMT
	I0916 11:14:50.578066 1501462 round_trippers.go:580]     Audit-Id: 195caa61-db08-4fe7-93a0-79435e9ae7c2
	I0916 11:14:50.578223 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"642","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6427 chars]
	I0916 11:14:50.578740 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:50.578751 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:50.578758 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:50.578762 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:50.581174 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:50.581194 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:50.581202 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:50.581207 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:50 GMT
	I0916 11:14:50.581212 1501462 round_trippers.go:580]     Audit-Id: 11aff600-bbfe-4708-8f8b-93141350da2d
	I0916 11:14:50.581215 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:50.581218 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:50.581221 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:50.581318 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:50.581711 1501462 pod_ready.go:103] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"False"
	I0916 11:14:51.075224 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:14:51.075252 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:51.075262 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:51.075266 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:51.077645 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:51.077671 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:51.077680 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:51 GMT
	I0916 11:14:51.077683 1501462 round_trippers.go:580]     Audit-Id: 0d7f244d-deef-40e5-9a1f-cfcef4b1d457
	I0916 11:14:51.077686 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:51.077689 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:51.077693 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:51.077695 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:51.077937 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"947","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:14:51.078516 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:14:51.078545 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:51.078560 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:51.078572 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:51.080947 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:51.080969 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:51.080978 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:51.080982 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:51.080986 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:51 GMT
	I0916 11:14:51.080990 1501462 round_trippers.go:580]     Audit-Id: 1dbd3ea6-ff1c-4f88-a052-1dcf2504469a
	I0916 11:14:51.080994 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:51.081006 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:51.081148 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m03","uid":"0a3ae4f6-3e42-45c5-9af6-3ab99d0c1b81","resourceVersion":"935","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_49_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6056 chars]
	I0916 11:14:51.081571 1501462 pod_ready.go:93] pod "kube-proxy-vf648" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:51.081595 1501462 pod_ready.go:82] duration metric: took 8.507151006s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
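For readers tracing the pod_ready loop above: minikube re-fetches the pod (and its node) roughly every 500ms until the pod's Ready condition flips to True, which is why the same GET pair repeats with only timestamps and Audit-Ids changing. Below is a minimal client-go sketch of that polling pattern, under stated assumptions: it is an illustration, not minikube's actual pod_ready.go, and the function name waitPodReady plus the hard-coded pod/namespace in main are hypothetical.

    package main

    import (
        "context"
        "fmt"
        "log"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // waitPodReady polls the API server until the pod's Ready condition is
    // True, mirroring the ~500ms cadence visible in the log timestamps above
    // (…:45.080, …:45.575, …:46.074, …).
    func waitPodReady(cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
            if err != nil {
                return err // a sketch can bail; the real loop logs and retries
            }
            for _, c := range pod.Status.Conditions {
                if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
                    return nil
                }
            }
            time.Sleep(500 * time.Millisecond)
        }
        return fmt.Errorf("pod %q in %q not Ready within %v", name, ns, timeout)
    }

    func main() {
        // Load the default kubeconfig, as kubectl would.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            log.Fatal(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            log.Fatal(err)
        }
        // Hypothetical target, taken from the log above for illustration only.
        if err := waitPodReady(cs, "kube-system", "kube-proxy-vf648", 6*time.Minute); err != nil {
            log.Fatal(err)
        }
        fmt.Println("pod is Ready")
    }

In the trace, the loop reported "Ready":"False" until resourceVersion 947 appeared, at which point the wait completed in 8.507151006s and the next wait (kube-scheduler-multinode-654612) began.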
	I0916 11:14:51.081608 1501462 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:51.081675 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:14:51.081685 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:51.081693 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:51.081697 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:51.083988 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:51.084012 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:51.084019 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:51.084024 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:51 GMT
	I0916 11:14:51.084028 1501462 round_trippers.go:580]     Audit-Id: 0d08ff7c-2c08-4660-9b71-eaa313282657
	I0916 11:14:51.084031 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:51.084035 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:51.084039 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:51.084207 1501462 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"755","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5101 chars]
	I0916 11:14:51.084664 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:14:51.084713 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:51.084722 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:51.084728 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:51.087144 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:51.087168 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:51.087177 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:51 GMT
	I0916 11:14:51.087183 1501462 round_trippers.go:580]     Audit-Id: c4758ebe-ab44-4763-8ba0-c91fa63c7def
	I0916 11:14:51.087187 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:51.087190 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:51.087193 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:51.087196 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:51.087363 1501462 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:14:51.087789 1501462 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:14:51.087809 1501462 pod_ready.go:82] duration metric: took 6.19476ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:14:51.087822 1501462 pod_ready.go:39] duration metric: took 9.313844542s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:14:51.087835 1501462 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:14:51.087911 1501462 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:14:51.100728 1501462 system_svc.go:56] duration metric: took 12.882046ms WaitForService to wait for kubelet
	I0916 11:14:51.100761 1501462 kubeadm.go:582] duration metric: took 16.460294617s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:14:51.100782 1501462 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:14:51.100862 1501462 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:14:51.100873 1501462 round_trippers.go:469] Request Headers:
	I0916 11:14:51.100881 1501462 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:14:51.100886 1501462 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:14:51.103743 1501462 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:14:51.103771 1501462 round_trippers.go:577] Response Headers:
	I0916 11:14:51.103780 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:14:51.103784 1501462 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:14:51.103791 1501462 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:14:51 GMT
	I0916 11:14:51.103794 1501462 round_trippers.go:580]     Audit-Id: 5b22c577-c00d-42aa-9aa1-170302e712e1
	I0916 11:14:51.103797 1501462 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:14:51.103800 1501462 round_trippers.go:580]     Content-Type: application/json
	I0916 11:14:51.104282 1501462 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"950"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":" [truncated 20780 chars]
	I0916 11:14:51.105348 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:51.105377 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:51.105388 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:51.105393 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:51.105398 1501462 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:14:51.105403 1501462 node_conditions.go:123] node cpu capacity is 2
	I0916 11:14:51.105409 1501462 node_conditions.go:105] duration metric: took 4.622442ms to run NodePressure ...
	I0916 11:14:51.105429 1501462 start.go:241] waiting for startup goroutines ...
	I0916 11:14:51.105451 1501462 start.go:255] writing updated cluster config ...
	I0916 11:14:51.105795 1501462 ssh_runner.go:195] Run: rm -f paused
	I0916 11:14:51.115441 1501462 out.go:177] * Done! kubectl is now configured to use "multinode-654612" cluster and "default" namespace by default
	E0916 11:14:51.119341 1501462 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> CRI-O <==
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.367496634Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=f867ef5c-b603-4f2d-a9e1-2b2edda9b727 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.369730483Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=ca96d590-f037-40f9-833e-81a60c761d24 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.369968713Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=ca96d590-f037-40f9-833e-81a60c761d24 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.370781042Z" level=info msg="Creating container: kube-system/storage-provisioner/storage-provisioner" id=76f916c8-2c6c-4fae-b216-e07e2014dc23 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.370978929Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.388752540Z" level=warning msg="Failed to open /etc/passwd: open /var/lib/containers/storage/overlay/bb97d5980ec36f3691596ba486d878924380e573414f71cf078987eea4784d70/merged/etc/passwd: no such file or directory"
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.388799291Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/bb97d5980ec36f3691596ba486d878924380e573414f71cf078987eea4784d70/merged/etc/group: no such file or directory"
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.428976292Z" level=info msg="Created container 3ae2e73ccbc162b2588f42d92eb27904e963b3429c7bdf30ec1bfe476cce4885: kube-system/storage-provisioner/storage-provisioner" id=76f916c8-2c6c-4fae-b216-e07e2014dc23 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.430904326Z" level=info msg="Starting container: 3ae2e73ccbc162b2588f42d92eb27904e963b3429c7bdf30ec1bfe476cce4885" id=0ef5ace3-3d00-4391-95b4-fa7c0ed57ae1 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:13:43 multinode-654612 crio[631]: time="2024-09-16 11:13:43.440818561Z" level=info msg="Started container" PID=1489 containerID=3ae2e73ccbc162b2588f42d92eb27904e963b3429c7bdf30ec1bfe476cce4885 description=kube-system/storage-provisioner/storage-provisioner id=0ef5ace3-3d00-4391-95b4-fa7c0ed57ae1 name=/runtime.v1.RuntimeService/StartContainer sandboxID=2da8de9351579c439191b9afcb83859d27f5c74e7dafac70ff7316f2206b3436
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.718034034Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.721696205Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.721731691Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.721756716Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.725721247Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.725756315Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.725785122Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.729431408Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.729469619Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.729490877Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.733009692Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.733047369Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.733065559Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.736120401Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:13:53 multinode-654612 crio[631]: time="2024-09-16 11:13:53.736158193Z" level=info msg="Updated default CNI network name to kindnet"
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	3ae2e73ccbc16       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   About a minute ago   Running             storage-provisioner       2                   2da8de9351579       storage-provisioner
	2af4726fa1f35       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   About a minute ago   Running             coredns                   1                   b53789daa0a0b       coredns-7c65d6cfc9-szvv9
	1c4c35778d5a4       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   About a minute ago   Running             kube-proxy                1                   30b54122406e1       kube-proxy-t9pzq
	1919daff83457       89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd   About a minute ago   Running             busybox                   1                   86a065daa887d       busybox-7dff88458-rdtjw
	2d295548684b4       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   About a minute ago   Running             kindnet-cni               1                   23e8171c84cb9       kindnet-whjqt
	74535872778c0       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   About a minute ago   Exited              storage-provisioner       1                   2da8de9351579       storage-provisioner
	f65bfffec4130       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   About a minute ago   Running             kube-apiserver            1                   7360fa05c2694       kube-apiserver-multinode-654612
	4729d6fed93e5       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   About a minute ago   Running             kube-controller-manager   1                   ec8a03a76bb2f       kube-controller-manager-multinode-654612
	02948e1dee20a       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   About a minute ago   Running             kube-scheduler            1                   21e62554c31f3       kube-scheduler-multinode-654612
	277d0e5e47ff5       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   About a minute ago   Running             etcd                      1                   ed8ff43a2e11c       etcd-multinode-654612
	
	
	==> coredns [2af4726fa1f357b97bd09777380e1c194a424e80ea2e4d5b1a2464b05a025eef] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = bfa258e3dfcd8004ab6c7d60772766a595ee209e49c62e6ae56bd911a145318b327e0c73bbccac30667047dafea6a8c1149027cea85d58a2246677e8ec1caab2
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:60900 - 58370 "HINFO IN 3769895235797377904.4709434450396632126. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.014527849s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[6482037]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:13:13.585) (total time: 30000ms):
	Trace[6482037]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:13:43.585)
	Trace[6482037]: [30.000643109s] [30.000643109s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[2138995071]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:13:13.585) (total time: 30000ms):
	Trace[2138995071]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:13:43.586)
	Trace[2138995071]: [30.000723919s] [30.000723919s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1984770615]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:13:13.586) (total time: 30000ms):
	Trace[1984770615]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:13:43.586)
	Trace[1984770615]: [30.000799159s] [30.000799159s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> describe nodes <==
	Name:               multinode-654612
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:10:07 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:14:52 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:13:11 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:13:11 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:13:11 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:13:11 +0000   Mon, 16 Sep 2024 11:10:56 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.2
	  Hostname:    multinode-654612
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 6119ef809ecd4b638a18feb903efb648
	  System UUID:                b0403d6b-24c6-42eb-8273-193a1e97b1c8
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-rdtjw                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m30s
	  kube-system                 coredns-7c65d6cfc9-szvv9                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     4m43s
	  kube-system                 etcd-multinode-654612                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m48s
	  kube-system                 kindnet-whjqt                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m44s
	  kube-system                 kube-apiserver-multinode-654612             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4m48s
	  kube-system                 kube-controller-manager-multinode-654612    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m48s
	  kube-system                 kube-proxy-t9pzq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m44s
	  kube-system                 kube-scheduler-multinode-654612             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m48s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m43s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 4m41s                kube-proxy       
	  Normal   Starting                 104s                 kube-proxy       
	  Warning  CgroupV1                 4m48s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 4m48s                kubelet          Starting kubelet.
	  Normal   NodeHasSufficientMemory  4m48s                kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m48s                kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m48s                kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           4m44s                node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	  Normal   CIDRAssignmentFailed     4m44s                cidrAllocator    Node multinode-654612 status is now: CIDRAssignmentFailed
	  Normal   NodeReady                4m2s                 kubelet          Node multinode-654612 status is now: NodeReady
	  Normal   Starting                 112s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 112s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  112s (x8 over 112s)  kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    112s (x8 over 112s)  kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     112s (x7 over 112s)  kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           104s                 node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	
	
	Name:               multinode-654612-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:13 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:14:49 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:14:08 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:14:08 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:14:08 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:14:08 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.3
	  Hostname:    multinode-654612-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 7c649bc18cb146aab9f77eed443d16f9
	  System UUID:                9e565e5c-62ec-45b7-a6b4-8e158afd85b2
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-sfkxt    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m30s
	  kube-system                 kindnet-687xg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m45s
	  kube-system                 kube-proxy-gf2tw           0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m45s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m43s                  kube-proxy       
	  Normal   Starting                 32s                    kube-proxy       
	  Normal   NodeHasSufficientPID     3m45s (x2 over 3m45s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Warning  CgroupV1                 3m45s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m45s (x2 over 3m45s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m45s (x2 over 3m45s)  kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           3m44s                  node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeReady                3m33s                  kubelet          Node multinode-654612-m02 status is now: NodeReady
	  Normal   RegisteredNode           104s                   node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeNotReady             64s                    node-controller  Node multinode-654612-m02 status is now: NodeNotReady
	  Normal   Starting                 63s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 63s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     56s (x7 over 63s)      kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  50s (x8 over 63s)      kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    50s (x8 over 63s)      kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [277d0e5e47ff5fc1af35e1f974e3f873617b798cab022586985ca7e3b27d6602] <==
	{"level":"info","ts":"2024-09-16T11:13:07.160758Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap.db","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:13:07.161232Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:13:07.161259Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/wal","suffix":"wal","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-09-16T11:13:07.161150Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:13:07.185071Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:13:07.185414Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"8688e899f7831fc7","initial-advertise-peer-urls":["https://192.168.67.2:2380"],"listen-peer-urls":["https://192.168.67.2:2380"],"advertise-client-urls":["https://192.168.67.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.67.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:13:07.185476Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:13:07.185612Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:13:07.185650Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:13:08.120513Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T11:13:08.120663Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:13:08.120731Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgPreVoteResp from 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2024-09-16T11:13:08.120775Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T11:13:08.120921Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgVoteResp from 8688e899f7831fc7 at term 3"}
	{"level":"info","ts":"2024-09-16T11:13:08.120965Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became leader at term 3"}
	{"level":"info","ts":"2024-09-16T11:13:08.120999Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 8688e899f7831fc7 elected leader 8688e899f7831fc7 at term 3"}
	{"level":"info","ts":"2024-09-16T11:13:08.128921Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"8688e899f7831fc7","local-member-attributes":"{Name:multinode-654612 ClientURLs:[https://192.168.67.2:2379]}","request-path":"/0/members/8688e899f7831fc7/attributes","cluster-id":"9d8fdeb88b6def78","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:13:08.129207Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:13:08.130259Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:13:08.131222Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:13:08.131341Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.67.2:2379"}
	{"level":"info","ts":"2024-09-16T11:13:08.136713Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:13:08.136772Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:13:08.141063Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:13:08.142195Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	
	
	==> kernel <==
	 11:14:58 up 10:57,  0 users,  load average: 0.83, 1.92, 2.42
	Linux multinode-654612 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [2d295548684b40050c9526162ea0de478365daadbfe40bbb0e0756a1a87256d6] <==
	I0916 11:14:13.717754       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:14:13.717884       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:14:13.717901       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:14:23.724852       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:14:23.724992       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:14:23.725138       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:14:23.725175       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:14:23.725244       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:14:23.725277       1 main.go:299] handling current node
	I0916 11:14:33.717389       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:14:33.717424       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:14:33.717534       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:14:33.717546       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:14:33.717596       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:14:33.717607       1 main.go:299] handling current node
	I0916 11:14:43.717760       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:14:43.717798       1 main.go:299] handling current node
	I0916 11:14:43.717814       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:14:43.717820       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:14:43.717949       1 main.go:295] Handling node with IPs: map[192.168.67.4:{}]
	I0916 11:14:43.717961       1 main.go:322] Node multinode-654612-m03 has CIDR [10.244.3.0/24] 
	I0916 11:14:53.717753       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:14:53.717882       1 main.go:299] handling current node
	I0916 11:14:53.717904       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:14:53.717912       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	
	
	==> kube-apiserver [f65bfffec41304a213500bf1c3d5fb4aec37410a397e6cab70922ebb20cae205] <==
	I0916 11:13:10.987650       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 11:13:10.987701       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 11:13:10.987741       1 crdregistration_controller.go:114] Starting crd-autoregister controller
	I0916 11:13:10.987861       1 shared_informer.go:313] Waiting for caches to sync for crd-autoregister
	I0916 11:13:11.288862       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:13:11.291698       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:13:11.291798       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:13:11.291871       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:13:11.302310       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:13:11.309196       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:13:11.309913       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:13:11.309965       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:13:11.314471       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:13:11.314828       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:13:11.315180       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 11:13:11.315267       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:13:11.317892       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:13:11.331225       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:13:11.331333       1 policy_source.go:224] refreshing policies
	E0916 11:13:11.358914       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 11:13:11.392157       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:13:11.412211       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:13:11.922520       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:13:14.147664       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:13:14.546433       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-controller-manager [4729d6fed93e519930439127009e18a754fd4123fdb7482a567ccfac16ebcd73] <==
	I0916 11:13:54.360184       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:13:54.360356       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m03"
	I0916 11:13:54.371203       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:13:54.377450       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:13:54.389721       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:13:54.392410       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="13.953299ms"
	I0916 11:13:54.392541       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="40.59µs"
	I0916 11:13:59.465477       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:14:08.706422       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:14:08.706657       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	I0916 11:14:08.718153       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:14:09.426253       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:14:09.483482       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:13.873694       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="46.022µs"
	I0916 11:14:15.018121       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="52.708µs"
	I0916 11:14:28.984029       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="8.087505ms"
	I0916 11:14:28.984252       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="52.528µs"
	I0916 11:14:41.487685       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m03"
	I0916 11:14:41.487890       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:41.499480       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:44.450737       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:51.771524       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:51.788309       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:52.321879       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m03"
	I0916 11:14:52.322096       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-654612-m02"
	
	
	==> kube-proxy [1c4c35778d5a47e94140d35c6fcdd9657c8c26b578a00f18d67937128b5bd362] <==
	I0916 11:13:13.602142       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:13:13.713500       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.67.2"]
	E0916 11:13:13.713571       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:13:13.740206       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:13:13.740326       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:13:13.742325       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:13:13.742712       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:13:13.742913       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:13:13.744104       1 config.go:199] "Starting service config controller"
	I0916 11:13:13.744189       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:13:13.744262       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:13:13.744294       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:13:13.744886       1 config.go:328] "Starting node config controller"
	I0916 11:13:13.746619       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:13:13.845230       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:13:13.845238       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:13:13.846842       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [02948e1dee20a81e5b6f62935b90e57e03b4b40192e1cd568dce2ff53cb20fea] <==
	I0916 11:13:10.395357       1 serving.go:386] Generated self-signed cert in-memory
	I0916 11:13:11.727928       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:13:11.727966       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:13:11.737313       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 11:13:11.737447       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 11:13:11.737540       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:13:11.737588       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:13:11.737911       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 11:13:11.740921       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:13:11.737590       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:13:11.738115       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:13:11.838335       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 11:13:11.841524       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:13:11.841608       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:13:12 multinode-654612 kubelet[738]: I0916 11:13:12.983785     738 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:13:16 multinode-654612 kubelet[738]: E0916 11:13:16.281589     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485196281330754,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:16 multinode-654612 kubelet[738]: E0916 11:13:16.281626     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485196281330754,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:16 multinode-654612 kubelet[738]: I0916 11:13:16.474347     738 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
	Sep 16 11:13:26 multinode-654612 kubelet[738]: E0916 11:13:26.283477     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485206283071957,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:26 multinode-654612 kubelet[738]: E0916 11:13:26.283514     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485206283071957,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:36 multinode-654612 kubelet[738]: E0916 11:13:36.284952     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485216284443223,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:36 multinode-654612 kubelet[738]: E0916 11:13:36.284987     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485216284443223,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:43 multinode-654612 kubelet[738]: I0916 11:13:43.366423     738 scope.go:117] "RemoveContainer" containerID="74535872778c031972678a8f853035a125594b63b4e8995aa53d9516fe9f4414"
	Sep 16 11:13:46 multinode-654612 kubelet[738]: E0916 11:13:46.285934     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485226285724796,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:46 multinode-654612 kubelet[738]: E0916 11:13:46.286422     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485226285724796,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:56 multinode-654612 kubelet[738]: E0916 11:13:56.288172     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485236287741565,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:13:56 multinode-654612 kubelet[738]: E0916 11:13:56.288209     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485236287741565,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:06 multinode-654612 kubelet[738]: E0916 11:14:06.289364     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485246289141494,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:06 multinode-654612 kubelet[738]: E0916 11:14:06.289456     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485246289141494,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:16 multinode-654612 kubelet[738]: E0916 11:14:16.290455     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485256290233934,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:16 multinode-654612 kubelet[738]: E0916 11:14:16.290493     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485256290233934,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:26 multinode-654612 kubelet[738]: E0916 11:14:26.291452     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485266291190009,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:26 multinode-654612 kubelet[738]: E0916 11:14:26.291491     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485266291190009,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:36 multinode-654612 kubelet[738]: E0916 11:14:36.293952     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485276292644870,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:36 multinode-654612 kubelet[738]: E0916 11:14:36.294001     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485276292644870,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:46 multinode-654612 kubelet[738]: E0916 11:14:46.295109     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485286294765212,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:46 multinode-654612 kubelet[738]: E0916 11:14:46.295145     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485286294765212,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:56 multinode-654612 kubelet[738]: E0916 11:14:56.296887     738 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485296296204131,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:14:56 multinode-654612 kubelet[738]: E0916 11:14:56.296951     738 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485296296204131,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	
-- /stdout --
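The repeated kubelet entries above come from the eviction manager's housekeeping pass: roughly every 10 seconds (see the timestamps) it asks the CRI runtime for image filesystem stats, and the ImageFsInfoResponse it receives carries an empty ContainerFilesystems list, which this kubelet treats as "missing image stats". One way to see what the runtime actually reports, assuming crictl is available inside the node:

	# Illustrative diagnostic, not part of the recorded test run: dump the
	# image filesystem info that the eviction manager consumes.
	minikube -p multinode-654612 ssh -- sudo crictl imagefsinfo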
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-654612 -n multinode-654612
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (634.077µs)
helpers_test.go:263: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/DeleteNode (9.69s)
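As with the other kubectl-dependent failures in this report, the FAIL above is a tooling problem rather than a cluster problem: "fork/exec /usr/local/bin/kubectl: exec format error" means the kernel refused to execute the kubectl binary at all (note the sub-millisecond failure time), which on an arm64 runner usually points to a binary built for another architecture or a corrupted download. A minimal check, assuming shell access to the CI host:

	# Illustrative diagnostic, not part of the recorded test run:
	uname -m                      # expected: aarch64 on this runner
	file /usr/local/bin/kubectl   # an x86-64 ELF (or non-ELF) here would explain the error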

TestMultiNode/serial/RestartMultiNode (64.13s)

=== RUN   TestMultiNode/serial/RestartMultiNode
multinode_test.go:376: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-654612 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=crio
multinode_test.go:376: (dbg) Done: out/minikube-linux-arm64 start -p multinode-654612 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=crio: (1m0.798757841s)
multinode_test.go:382: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
multinode_test.go:396: (dbg) Run:  kubectl get nodes
multinode_test.go:396: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (605.163µs)
multinode_test.go:398: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/RestartMultiNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-654612
helpers_test.go:235: (dbg) docker inspect multinode-654612:
-- stdout --
	[
	    {
	        "Id": "402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd",
	        "Created": "2024-09-16T11:09:45.282229543Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1507784,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:15:25.451287431Z",
	            "FinishedAt": "2024-09-16T11:15:24.415584442Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hostname",
	        "HostsPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/hosts",
	        "LogPath": "/var/lib/docker/containers/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd-json.log",
	        "Name": "/multinode-654612",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-654612:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-654612",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/merged",
	                "UpperDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/diff",
	                "WorkDir": "/var/lib/docker/overlay2/8f4b3b4182dcd0f3e388c56cd4cf94240822ac61d3fc96a90ebc4c74757003f6/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "multinode-654612",
	                "Source": "/var/lib/docker/volumes/multinode-654612/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-654612",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-654612",
	                "name.minikube.sigs.k8s.io": "multinode-654612",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "111cb133f0dfd958acf9082f5054bcd563b05b2fe3d23be0af7148c0ed171daa",
	            "SandboxKey": "/var/run/docker/netns/111cb133f0df",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34773"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34774"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34777"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34775"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34776"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-654612": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.67.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:43:02",
	                    "DriverOpts": null,
	                    "NetworkID": "76703dbf7b5c303b888ff80e924d3dab5e1ece3140da60ee94903d5d35e68013",
	                    "EndpointID": "b6ddbda0fd57b2fd6f3d9358f7c634d87b64df4dd290802e867f4a141ccbdea3",
	                    "Gateway": "192.168.67.1",
	                    "IPAddress": "192.168.67.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-654612",
	                        "402497514f0b"
	                    ]
	                }
	            }
	        }
	    }
	]
-- /stdout --
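Most of the inspect payload above is noise for post-mortem purposes; the harness itself narrows it to single fields with Go templates later in this log. The same two queries can be run by hand:

	# Illustrative: container state, and the host port mapped to the guest's
	# SSH port (22/tcp). These mirror the templates the harness runs further below.
	docker container inspect multinode-654612 --format={{.State.Status}}
	docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' multinode-654612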
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-654612 -n multinode-654612
helpers_test.go:244: <<< TestMultiNode/serial/RestartMultiNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/RestartMultiNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 logs -n 25: (1.541107021s)
helpers_test.go:252: TestMultiNode/serial/RestartMultiNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m02_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m03 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp testdata/cp-test.txt                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612:/home/docker/cp-test_multinode-654612-m03_multinode-654612.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612 sudo cat                                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt                       | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m02:/home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n                                                                 | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | multinode-654612-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-654612 ssh -n multinode-654612-m02 sudo cat                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | /home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-654612 node stop m03                                                          | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	| node    | multinode-654612 node start                                                             | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	| node    | list -p multinode-654612                                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC |                     |
	| stop    | -p multinode-654612                                                                     | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	| start   | -p multinode-654612                                                                     | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:14 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	| node    | list -p multinode-654612                                                                | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:14 UTC |                     |
	| node    | multinode-654612 node delete                                                            | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:14 UTC | 16 Sep 24 11:14 UTC |
	|         | m03                                                                                     |                  |         |         |                     |                     |
	| stop    | multinode-654612 stop                                                                   | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:15 UTC | 16 Sep 24 11:15 UTC |
	| start   | -p multinode-654612                                                                     | multinode-654612 | jenkins | v1.34.0 | 16 Sep 24 11:15 UTC | 16 Sep 24 11:16 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	|         | --driver=docker                                                                         |                  |         |         |                     |                     |
	|         | --container-runtime=crio                                                                |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:15:24
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:15:24.941620 1507580 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:15:24.941776 1507580 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:15:24.941801 1507580 out.go:358] Setting ErrFile to fd 2...
	I0916 11:15:24.941824 1507580 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:15:24.942129 1507580 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:15:24.942539 1507580 out.go:352] Setting JSON to false
	I0916 11:15:24.943453 1507580 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":39470,"bootTime":1726445855,"procs":176,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:15:24.943527 1507580 start.go:139] virtualization:  
	I0916 11:15:24.946864 1507580 out.go:177] * [multinode-654612] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:15:24.950364 1507580 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:15:24.950518 1507580 notify.go:220] Checking for updates...
	I0916 11:15:24.955827 1507580 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:15:24.958512 1507580 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:15:24.961159 1507580 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:15:24.963735 1507580 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:15:24.966289 1507580 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:15:24.969394 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:24.969947 1507580 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:15:25.006042 1507580 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:15:25.006270 1507580 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:15:25.086987 1507580 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:2 ContainersRunning:0 ContainersPaused:0 ContainersStopped:2 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:15:25.075747495 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:15:25.087116 1507580 docker.go:318] overlay module found
	I0916 11:15:25.090666 1507580 out.go:177] * Using the docker driver based on existing profile
	I0916 11:15:25.093196 1507580 start.go:297] selected driver: docker
	I0916 11:15:25.093228 1507580 start.go:901] validating driver "docker" against &{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:15:25.093393 1507580 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:15:25.093511 1507580 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:15:25.151147 1507580 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:2 ContainersRunning:0 ContainersPaused:0 ContainersStopped:2 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:15:25.140824138 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:15:25.151655 1507580 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:15:25.151694 1507580 cni.go:84] Creating CNI manager for ""
	I0916 11:15:25.151743 1507580 cni.go:136] multinode detected (2 nodes found), recommending kindnet
	I0916 11:15:25.151795 1507580 start.go:340] cluster config:
	{Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:15:25.154706 1507580 out.go:177] * Starting "multinode-654612" primary control-plane node in "multinode-654612" cluster
	I0916 11:15:25.157211 1507580 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:15:25.159829 1507580 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:15:25.162426 1507580 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:15:25.162495 1507580 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:15:25.162508 1507580 cache.go:56] Caching tarball of preloaded images
	I0916 11:15:25.162599 1507580 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:15:25.162616 1507580 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:15:25.162770 1507580 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:15:25.163022 1507580 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	W0916 11:15:25.182446 1507580 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:15:25.182471 1507580 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:15:25.182569 1507580 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:15:25.182593 1507580 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:15:25.182598 1507580 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:15:25.182606 1507580 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:15:25.182612 1507580 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:15:25.184204 1507580 image.go:273] response: 
	I0916 11:15:25.308485 1507580 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:15:25.308553 1507580 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:15:25.308602 1507580 start.go:360] acquireMachinesLock for multinode-654612: {Name:mkfbf36af9c510d3c0697cdadc867dcd6648c047 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:15:25.308713 1507580 start.go:364] duration metric: took 77.718µs to acquireMachinesLock for "multinode-654612"
	I0916 11:15:25.308744 1507580 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:15:25.308752 1507580 fix.go:54] fixHost starting: 
	I0916 11:15:25.309033 1507580 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:15:25.326103 1507580 fix.go:112] recreateIfNeeded on multinode-654612: state=Stopped err=<nil>
	W0916 11:15:25.326135 1507580 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:15:25.329032 1507580 out.go:177] * Restarting existing docker container for "multinode-654612" ...
	I0916 11:15:25.331597 1507580 cli_runner.go:164] Run: docker start multinode-654612
	I0916 11:15:25.635316 1507580 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:15:25.658124 1507580 kic.go:430] container "multinode-654612" state is running.
	I0916 11:15:25.658524 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:15:25.695191 1507580 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:15:25.695462 1507580 machine.go:93] provisionDockerMachine start ...
	I0916 11:15:25.695533 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:25.725671 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:25.725942 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34773 <nil> <nil>}
	I0916 11:15:25.725959 1507580 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:15:25.726505 1507580 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:54454->127.0.0.1:34773: read: connection reset by peer
	I0916 11:15:28.860032 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:15:28.860063 1507580 ubuntu.go:169] provisioning hostname "multinode-654612"
	I0916 11:15:28.860140 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:28.881086 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:28.881371 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34773 <nil> <nil>}
	I0916 11:15:28.881389 1507580 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612 && echo "multinode-654612" | sudo tee /etc/hostname
	I0916 11:15:29.028733 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612
	
	I0916 11:15:29.028817 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:29.046880 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:29.047129 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34773 <nil> <nil>}
	I0916 11:15:29.047152 1507580 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:15:29.184748 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:15:29.184777 1507580 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:15:29.184815 1507580 ubuntu.go:177] setting up certificates
	I0916 11:15:29.184832 1507580 provision.go:84] configureAuth start
	I0916 11:15:29.184904 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:15:29.201103 1507580 provision.go:143] copyHostCerts
	I0916 11:15:29.201149 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:15:29.201183 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:15:29.201194 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:15:29.201269 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:15:29.201369 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:15:29.201391 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:15:29.201401 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:15:29.201436 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:15:29.201492 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:15:29.201512 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:15:29.201519 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:15:29.201545 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:15:29.201607 1507580 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612 san=[127.0.0.1 192.168.67.2 localhost minikube multinode-654612]
	I0916 11:15:29.734800 1507580 provision.go:177] copyRemoteCerts
	I0916 11:15:29.734891 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:15:29.734940 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:29.751277 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34773 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:15:29.850166 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:15:29.850231 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:15:29.877011 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:15:29.877086 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:15:29.902708 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:15:29.902819 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:15:29.929118 1507580 provision.go:87] duration metric: took 744.257441ms to configureAuth
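The configureAuth step above generates a server certificate whose SAN list mixes IP addresses and hostnames ([127.0.0.1 192.168.67.2 localhost minikube multinode-654612]). A minimal Go sketch of how such a list could be split into the IPAddresses and DNSNames fields of an x509 template; the sanToTemplate helper is illustrative, not minikube's actual code:

    package main

    import (
        "crypto/x509"
        "fmt"
        "net"
    )

    // sanToTemplate splits a mixed SAN list into the IP and DNS fields of an
    // x509 certificate template (hypothetical helper, not minikube's code).
    func sanToTemplate(sans []string) *x509.Certificate {
        tmpl := &x509.Certificate{}
        for _, s := range sans {
            if ip := net.ParseIP(s); ip != nil {
                tmpl.IPAddresses = append(tmpl.IPAddresses, ip)
            } else {
                tmpl.DNSNames = append(tmpl.DNSNames, s)
            }
        }
        return tmpl
    }

    func main() {
        t := sanToTemplate([]string{"127.0.0.1", "192.168.67.2", "localhost", "minikube", "multinode-654612"})
        fmt.Println("IP SANs:", t.IPAddresses) // [127.0.0.1 192.168.67.2]
        fmt.Println("DNS SANs:", t.DNSNames)   // [localhost minikube multinode-654612]
    }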
	I0916 11:15:29.929148 1507580 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:15:29.929384 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:29.929496 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:29.946703 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:29.946964 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34773 <nil> <nil>}
	I0916 11:15:29.946986 1507580 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:15:30.401113 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:15:30.401139 1507580 machine.go:96] duration metric: took 4.705667234s to provisionDockerMachine
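The SSH command above writes a one-line sysconfig drop-in so CRI-O treats the service CIDR as an insecure registry range, then restarts the daemon. A small Go sketch of composing that drop-in; crioMinikubeOptions is a hypothetical helper, and only the output format mirrors the file written in the log:

    package main

    import "fmt"

    // crioMinikubeOptions renders the CRIO_MINIKUBE_OPTIONS drop-in content
    // (hypothetical helper; output matches the file written in the log).
    func crioMinikubeOptions(insecureRegistries []string) string {
        opts := ""
        for _, r := range insecureRegistries {
            opts += "--insecure-registry " + r + " "
        }
        return fmt.Sprintf("CRIO_MINIKUBE_OPTIONS='%s'\n", opts)
    }

    func main() {
        // 10.96.0.0/12 is the service CIDR passed through in the log above.
        fmt.Print(crioMinikubeOptions([]string{"10.96.0.0/12"}))
    }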
	I0916 11:15:30.401152 1507580 start.go:293] postStartSetup for "multinode-654612" (driver="docker")
	I0916 11:15:30.401163 1507580 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:15:30.401230 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:15:30.401297 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:30.418835 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34773 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:15:30.518972 1507580 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:15:30.522557 1507580 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:15:30.522577 1507580 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:15:30.522584 1507580 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:15:30.522590 1507580 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:15:30.522594 1507580 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:15:30.522618 1507580 command_runner.go:130] > ID=ubuntu
	I0916 11:15:30.522622 1507580 command_runner.go:130] > ID_LIKE=debian
	I0916 11:15:30.522626 1507580 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:15:30.522631 1507580 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:15:30.522641 1507580 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:15:30.522648 1507580 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:15:30.522652 1507580 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:15:30.522705 1507580 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:15:30.522728 1507580 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:15:30.522738 1507580 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:15:30.522745 1507580 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:15:30.522755 1507580 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:15:30.522814 1507580 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:15:30.522902 1507580 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:15:30.522909 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:15:30.523012 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:15:30.531943 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:15:30.557892 1507580 start.go:296] duration metric: took 156.724397ms for postStartSetup
	I0916 11:15:30.557978 1507580 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:15:30.558045 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:30.575056 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34773 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:15:30.669692 1507580 command_runner.go:130] > 12%
	I0916 11:15:30.669767 1507580 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:15:30.674034 1507580 command_runner.go:130] > 171G
	I0916 11:15:30.674471 1507580 fix.go:56] duration metric: took 5.365712774s for fixHost
	I0916 11:15:30.674491 1507580 start.go:83] releasing machines lock for "multinode-654612", held for 5.36576166s
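The free-space probes above shell out to df and awk. The same check in Go, assuming df's usual six-column layout where row 2, column 5 is Use% (matching the "12%" result above):

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    // varUsePercent mirrors `df -h /var | awk 'NR==2{print $5}'`.
    func varUsePercent() (string, error) {
        out, err := exec.Command("df", "-h", "/var").Output()
        if err != nil {
            return "", err
        }
        lines := strings.Split(strings.TrimSpace(string(out)), "\n")
        if len(lines) < 2 {
            return "", fmt.Errorf("unexpected df output: %q", out)
        }
        fields := strings.Fields(lines[1])
        if len(fields) < 5 {
            return "", fmt.Errorf("unexpected df row: %q", lines[1])
        }
        return fields[4], nil // Use% column, e.g. "12%"
    }

    func main() {
        pct, err := varUsePercent()
        if err != nil {
            fmt.Println("df check failed:", err)
            return
        }
        fmt.Println("/var usage:", pct)
    }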
	I0916 11:15:30.674567 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:15:30.691086 1507580 ssh_runner.go:195] Run: cat /version.json
	I0916 11:15:30.691142 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:30.691428 1507580 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:15:30.691499 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:15:30.708906 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34773 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:15:30.711177 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34773 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:15:30.932264 1507580 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:15:30.935456 1507580 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:15:30.935648 1507580 ssh_runner.go:195] Run: systemctl --version
	I0916 11:15:30.939815 1507580 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:15:30.940005 1507580 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:15:30.940128 1507580 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:15:31.083380 1507580 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:15:31.089147 1507580 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 11:15:31.089185 1507580 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:15:31.089194 1507580 command_runner.go:130] > Device: 36h/54d	Inode: 1570512     Links: 1
	I0916 11:15:31.089201 1507580 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:15:31.089207 1507580 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:15:31.089219 1507580 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:15:31.089225 1507580 command_runner.go:130] > Change: 2024-09-16 11:09:48.923228029 +0000
	I0916 11:15:31.089230 1507580 command_runner.go:130] >  Birth: 2024-09-16 11:09:48.919228126 +0000
	I0916 11:15:31.089462 1507580 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:15:31.099689 1507580 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:15:31.099779 1507580 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:15:31.109581 1507580 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
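The two find ... -exec mv steps above rename conflicting CNI configs to *.mk_disabled so only the intended network plugin stays active. A Go sketch of the same rename pass; the directory and pattern come from the log, the helper name is invented:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
        "strings"
    )

    // disableCNIConfigs renames matching configs so CRI-O ignores them,
    // mimicking the find/mv pair in the log (helper name is invented).
    func disableCNIConfigs(dir, pattern string) error {
        matches, err := filepath.Glob(filepath.Join(dir, pattern))
        if err != nil {
            return err
        }
        for _, m := range matches {
            if strings.HasSuffix(m, ".mk_disabled") {
                continue // already disabled on a previous run
            }
            if err := os.Rename(m, m+".mk_disabled"); err != nil {
                return err
            }
            fmt.Println("disabled:", m)
        }
        return nil
    }

    func main() {
        if err := disableCNIConfigs("/etc/cni/net.d", "*loopback.conf*"); err != nil {
            fmt.Println(err)
        }
    }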
	I0916 11:15:31.109605 1507580 start.go:495] detecting cgroup driver to use...
	I0916 11:15:31.109641 1507580 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:15:31.109694 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:15:31.144646 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:15:31.158012 1507580 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:15:31.158129 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:15:31.172127 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:15:31.184379 1507580 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:15:31.265576 1507580 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:15:31.347564 1507580 docker.go:233] disabling docker service ...
	I0916 11:15:31.347638 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:15:31.361290 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:15:31.373994 1507580 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:15:31.468104 1507580 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:15:31.550804 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
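The block above stops, disables, and masks cri-docker and docker before handing the node to CRI-O. A compact Go sketch of that systemctl sequence; errors are reported rather than fatal, since a unit may simply be absent, matching the tolerant behavior implied by the log:

    package main

    import (
        "fmt"
        "os/exec"
    )

    // disableService runs the stop/disable/mask sequence from the log for one
    // unit family; failures are logged, not fatal, as the unit may not exist.
    func disableService(name string) {
        for _, args := range [][]string{
            {"systemctl", "stop", "-f", name + ".socket"},
            {"systemctl", "stop", "-f", name + ".service"},
            {"systemctl", "disable", name + ".socket"},
            {"systemctl", "mask", name + ".service"},
        } {
            if out, err := exec.Command("sudo", args...).CombinedOutput(); err != nil {
                fmt.Printf("%v: %v (%s)\n", args, err, out)
            }
        }
    }

    func main() {
        disableService("cri-docker")
        disableService("docker")
    }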
	I0916 11:15:31.562666 1507580 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:15:31.579190 1507580 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:15:31.580509 1507580 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:15:31.580596 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.591042 1507580 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:15:31.591175 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.601211 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.612804 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.624183 1507580 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:15:31.634354 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.645227 1507580 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.656386 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:31.667302 1507580 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:15:31.675582 1507580 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:15:31.676907 1507580 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:15:31.685761 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:31.766717 1507580 ssh_runner.go:195] Run: sudo systemctl restart crio
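The sed pipeline above rewrites keys such as pause_image and cgroup_manager in /etc/crio/crio.conf.d/02-crio.conf before restarting CRI-O. A Go sketch of an equivalent key rewrite; setTOMLKey is illustrative and deliberately line-based like sed, not a real TOML parser:

    package main

    import (
        "fmt"
        "os"
        "regexp"
    )

    // setTOMLKey rewrites any line assigning `key` to a quoted value, the way
    // the sed commands above do; a line-based sketch, not a TOML parser.
    func setTOMLKey(path, key, value string) error {
        data, err := os.ReadFile(path)
        if err != nil {
            return err
        }
        re := regexp.MustCompile(`(?m)^.*` + regexp.QuoteMeta(key) + ` = .*$`)
        out := re.ReplaceAll(data, []byte(fmt.Sprintf("%s = %q", key, value)))
        return os.WriteFile(path, out, 0o644)
    }

    func main() {
        conf := "/etc/crio/crio.conf.d/02-crio.conf"
        for k, v := range map[string]string{
            "pause_image":    "registry.k8s.io/pause:3.10",
            "cgroup_manager": "cgroupfs",
        } {
            if err := setTOMLKey(conf, k, v); err != nil {
                fmt.Println("edit failed:", err)
            }
        }
    }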
	I0916 11:15:31.884213 1507580 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:15:31.884319 1507580 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:15:31.888277 1507580 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:15:31.888346 1507580 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:15:31.888368 1507580 command_runner.go:130] > Device: 43h/67d	Inode: 208         Links: 1
	I0916 11:15:31.888388 1507580 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:15:31.888400 1507580 command_runner.go:130] > Access: 2024-09-16 11:15:31.870839761 +0000
	I0916 11:15:31.888406 1507580 command_runner.go:130] > Modify: 2024-09-16 11:15:31.870839761 +0000
	I0916 11:15:31.888411 1507580 command_runner.go:130] > Change: 2024-09-16 11:15:31.870839761 +0000
	I0916 11:15:31.888415 1507580 command_runner.go:130] >  Birth: -
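After the restart, start.go polls for the CRI-O socket with a 60s budget, as logged above. A minimal Go sketch of such a wait loop, assuming only that the socket appears at the logged path once the daemon is up:

    package main

    import (
        "fmt"
        "os"
        "time"
    )

    // waitForSocket polls until path exists as a unix socket or the timeout
    // elapses, mirroring the "Will wait 60s for socket path" step.
    func waitForSocket(path string, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            if fi, err := os.Stat(path); err == nil && fi.Mode()&os.ModeSocket != 0 {
                return nil
            }
            time.Sleep(500 * time.Millisecond)
        }
        return fmt.Errorf("timed out waiting for %s", path)
    }

    func main() {
        if err := waitForSocket("/var/run/crio/crio.sock", 60*time.Second); err != nil {
            fmt.Println(err)
            return
        }
        fmt.Println("socket ready")
    }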
	I0916 11:15:31.888437 1507580 start.go:563] Will wait 60s for crictl version
	I0916 11:15:31.888493 1507580 ssh_runner.go:195] Run: which crictl
	I0916 11:15:31.891808 1507580 command_runner.go:130] > /usr/bin/crictl
	I0916 11:15:31.891952 1507580 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:15:31.930972 1507580 command_runner.go:130] > Version:  0.1.0
	I0916 11:15:31.931041 1507580 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:15:31.931060 1507580 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:15:31.931081 1507580 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:15:31.933233 1507580 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:15:31.933380 1507580 ssh_runner.go:195] Run: crio --version
	I0916 11:15:31.975426 1507580 command_runner.go:130] > crio version 1.24.6
	I0916 11:15:31.975451 1507580 command_runner.go:130] > Version:          1.24.6
	I0916 11:15:31.975460 1507580 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:15:31.975464 1507580 command_runner.go:130] > GitTreeState:     clean
	I0916 11:15:31.975470 1507580 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:15:31.975474 1507580 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:15:31.975479 1507580 command_runner.go:130] > Compiler:         gc
	I0916 11:15:31.975483 1507580 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:15:31.975487 1507580 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:15:31.975502 1507580 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:15:31.975509 1507580 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:15:31.975513 1507580 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:15:31.977482 1507580 ssh_runner.go:195] Run: crio --version
	I0916 11:15:32.015703 1507580 command_runner.go:130] > crio version 1.24.6
	I0916 11:15:32.015726 1507580 command_runner.go:130] > Version:          1.24.6
	I0916 11:15:32.015734 1507580 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:15:32.015739 1507580 command_runner.go:130] > GitTreeState:     clean
	I0916 11:15:32.015744 1507580 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:15:32.015749 1507580 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:15:32.015754 1507580 command_runner.go:130] > Compiler:         gc
	I0916 11:15:32.015758 1507580 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:15:32.015763 1507580 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:15:32.015771 1507580 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:15:32.015786 1507580 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:15:32.015795 1507580 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:15:32.022016 1507580 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:15:32.024306 1507580 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:15:32.040352 1507580 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:15:32.044124 1507580 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
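The bash one-liner above drops any stale host.minikube.internal line from /etc/hosts and appends the current gateway IP. The same idea in Go (writing /etc/hosts requires root; the hostname and IP are taken from the log):

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    // ensureHostsEntry drops any line ending in "\t<host>" and appends the
    // current mapping, like the grep -v / echo pipeline above (needs root).
    func ensureHostsEntry(path, ip, host string) error {
        data, err := os.ReadFile(path)
        if err != nil {
            return err
        }
        var kept []string
        for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
            if strings.HasSuffix(line, "\t"+host) {
                continue // stale entry, replaced below
            }
            kept = append(kept, line)
        }
        kept = append(kept, ip+"\t"+host)
        return os.WriteFile(path, []byte(strings.Join(kept, "\n")+"\n"), 0o644)
    }

    func main() {
        if err := ensureHostsEntry("/etc/hosts", "192.168.67.1", "host.minikube.internal"); err != nil {
            fmt.Println(err)
        }
    }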
	I0916 11:15:32.055303 1507580 kubeadm.go:883] updating cluster {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:15:32.055455 1507580 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:15:32.055525 1507580 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:15:32.099070 1507580 command_runner.go:130] > {
	I0916 11:15:32.099093 1507580 command_runner.go:130] >   "images": [
	I0916 11:15:32.099098 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099107 1507580 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:15:32.099113 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099121 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:15:32.099124 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099128 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099136 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:15:32.099145 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:15:32.099151 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099158 1507580 command_runner.go:130] >       "size": "90295858",
	I0916 11:15:32.099162 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.099166 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099178 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099182 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099185 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099189 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099196 1507580 command_runner.go:130] >       "id": "89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:15:32.099202 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099209 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:15:32.099212 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099217 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099227 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3",
	I0916 11:15:32.099235 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:15:32.099239 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099243 1507580 command_runner.go:130] >       "size": "1496796",
	I0916 11:15:32.099247 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.099257 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099265 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099268 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099271 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099275 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099281 1507580 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:15:32.099293 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099299 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:15:32.099302 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099306 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099314 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:15:32.099325 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:15:32.099329 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099333 1507580 command_runner.go:130] >       "size": "29037500",
	I0916 11:15:32.099337 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.099343 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099348 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099356 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099359 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099362 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099369 1507580 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:15:32.099375 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099380 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:15:32.099383 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099387 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099395 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:15:32.099408 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:15:32.099412 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099416 1507580 command_runner.go:130] >       "size": "61647114",
	I0916 11:15:32.099432 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.099440 1507580 command_runner.go:130] >       "username": "nonroot",
	I0916 11:15:32.099445 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099448 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099452 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099457 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099466 1507580 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:15:32.099471 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099477 1507580 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:15:32.099480 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099484 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099493 1507580 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:15:32.099503 1507580 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:15:32.099510 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099514 1507580 command_runner.go:130] >       "size": "139912446",
	I0916 11:15:32.099518 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.099521 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.099528 1507580 command_runner.go:130] >       },
	I0916 11:15:32.099532 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099543 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099548 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099551 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099554 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099565 1507580 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:15:32.099570 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099576 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:15:32.099579 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099583 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099594 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:15:32.099606 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:15:32.099609 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099613 1507580 command_runner.go:130] >       "size": "92632544",
	I0916 11:15:32.099619 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.099623 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.099628 1507580 command_runner.go:130] >       },
	I0916 11:15:32.099632 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099638 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099641 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099644 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099648 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099657 1507580 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:15:32.099661 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099667 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:15:32.099672 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099676 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099688 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:15:32.099697 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:15:32.099703 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099707 1507580 command_runner.go:130] >       "size": "86930758",
	I0916 11:15:32.099710 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.099714 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.099721 1507580 command_runner.go:130] >       },
	I0916 11:15:32.099726 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099730 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099734 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099740 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099743 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099750 1507580 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:15:32.099756 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099762 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:15:32.099768 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099772 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099800 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:15:32.099812 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:15:32.099815 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099820 1507580 command_runner.go:130] >       "size": "95951255",
	I0916 11:15:32.099824 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.099827 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099831 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099835 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099841 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099845 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099855 1507580 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:15:32.099859 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099865 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:15:32.099870 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099874 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099882 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:15:32.099893 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:15:32.099896 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099900 1507580 command_runner.go:130] >       "size": "67007814",
	I0916 11:15:32.099903 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.099907 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.099910 1507580 command_runner.go:130] >       },
	I0916 11:15:32.099914 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.099920 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.099925 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.099932 1507580 command_runner.go:130] >     },
	I0916 11:15:32.099935 1507580 command_runner.go:130] >     {
	I0916 11:15:32.099942 1507580 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:15:32.099949 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.099954 1507580 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:15:32.099960 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099970 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.099978 1507580 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:15:32.099986 1507580 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:15:32.099989 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.099992 1507580 command_runner.go:130] >       "size": "519877",
	I0916 11:15:32.099997 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.100001 1507580 command_runner.go:130] >         "value": "65535"
	I0916 11:15:32.100004 1507580 command_runner.go:130] >       },
	I0916 11:15:32.100012 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.100016 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.100020 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.100027 1507580 command_runner.go:130] >     }
	I0916 11:15:32.100030 1507580 command_runner.go:130] >   ]
	I0916 11:15:32.100033 1507580 command_runner.go:130] > }
	I0916 11:15:32.102934 1507580 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:15:32.102956 1507580 crio.go:433] Images already preloaded, skipping extraction
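crio.go decides the images are preloaded by decoding the `crictl images --output json` payload shown above. A self-contained Go sketch of that decoding step, using the field names visible in the output (id, repoTags, size) and an abbreviated inline payload:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // crictlImages matches the shape of `crictl images --output json` as
    // logged above (only the fields used here are declared).
    type crictlImages struct {
        Images []struct {
            ID       string   `json:"id"`
            RepoTags []string `json:"repoTags"`
            Size     string   `json:"size"`
        } `json:"images"`
    }

    func main() {
        // Abbreviated payload in the same shape as the log output.
        payload := []byte(`{"images":[{"id":"afb61768ce38","repoTags":["registry.k8s.io/pause:3.10"],"size":"519877"}]}`)
        var out crictlImages
        if err := json.Unmarshal(payload, &out); err != nil {
            fmt.Println("decode failed:", err)
            return
        }
        for _, img := range out.Images {
            fmt.Println(img.RepoTags, img.Size)
        }
    }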
	I0916 11:15:32.103015 1507580 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:15:32.138413 1507580 command_runner.go:130] > {
	I0916 11:15:32.138439 1507580 command_runner.go:130] >   "images": [
	I0916 11:15:32.138445 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138454 1507580 command_runner.go:130] >       "id": "6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:15:32.138459 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138465 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:15:32.138468 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138472 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138481 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64",
	I0916 11:15:32.138489 1507580 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:15:32.138493 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138497 1507580 command_runner.go:130] >       "size": "90295858",
	I0916 11:15:32.138501 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.138505 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.138511 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138515 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138524 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138527 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138538 1507580 command_runner.go:130] >       "id": "89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:15:32.138550 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138559 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:15:32.138563 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138567 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138575 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:859d41e4316c182cb559f9ae3c5ffcac8602ee1179794a1707c06cd092a008d3",
	I0916 11:15:32.138589 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:15:32.138592 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138596 1507580 command_runner.go:130] >       "size": "1496796",
	I0916 11:15:32.138600 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.138605 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.138611 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138615 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138618 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138621 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138627 1507580 command_runner.go:130] >       "id": "ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:15:32.138631 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138636 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:15:32.138639 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138643 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138651 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2",
	I0916 11:15:32.138659 1507580 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:15:32.138662 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138666 1507580 command_runner.go:130] >       "size": "29037500",
	I0916 11:15:32.138670 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.138673 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.138677 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138680 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138683 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138686 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138692 1507580 command_runner.go:130] >       "id": "2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:15:32.138696 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138702 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:15:32.138707 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138711 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138719 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6",
	I0916 11:15:32.138733 1507580 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:15:32.138737 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138741 1507580 command_runner.go:130] >       "size": "61647114",
	I0916 11:15:32.138745 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.138752 1507580 command_runner.go:130] >       "username": "nonroot",
	I0916 11:15:32.138756 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138762 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138765 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138768 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138775 1507580 command_runner.go:130] >       "id": "27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:15:32.138783 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138787 1507580 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:15:32.138790 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138794 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138801 1507580 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a",
	I0916 11:15:32.138811 1507580 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"
	I0916 11:15:32.138817 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138821 1507580 command_runner.go:130] >       "size": "139912446",
	I0916 11:15:32.138825 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.138829 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.138833 1507580 command_runner.go:130] >       },
	I0916 11:15:32.138838 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.138842 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138853 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138856 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138859 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138865 1507580 command_runner.go:130] >       "id": "d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:15:32.138869 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138875 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:15:32.138880 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138884 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138892 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb",
	I0916 11:15:32.138902 1507580 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"
	I0916 11:15:32.138914 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138918 1507580 command_runner.go:130] >       "size": "92632544",
	I0916 11:15:32.138922 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.138934 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.138941 1507580 command_runner.go:130] >       },
	I0916 11:15:32.138945 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.138948 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.138952 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.138955 1507580 command_runner.go:130] >     },
	I0916 11:15:32.138958 1507580 command_runner.go:130] >     {
	I0916 11:15:32.138968 1507580 command_runner.go:130] >       "id": "279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:15:32.138972 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.138978 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:15:32.138984 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.138987 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.138996 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1",
	I0916 11:15:32.139007 1507580 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"
	I0916 11:15:32.139010 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139014 1507580 command_runner.go:130] >       "size": "86930758",
	I0916 11:15:32.139020 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.139024 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.139032 1507580 command_runner.go:130] >       },
	I0916 11:15:32.139036 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.139042 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.139047 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.139052 1507580 command_runner.go:130] >     },
	I0916 11:15:32.139055 1507580 command_runner.go:130] >     {
	I0916 11:15:32.139062 1507580 command_runner.go:130] >       "id": "24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:15:32.139068 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.139073 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:15:32.139078 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139082 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.139097 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44",
	I0916 11:15:32.139108 1507580 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"
	I0916 11:15:32.139111 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139118 1507580 command_runner.go:130] >       "size": "95951255",
	I0916 11:15:32.139123 1507580 command_runner.go:130] >       "uid": null,
	I0916 11:15:32.139130 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.139134 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.139137 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.139143 1507580 command_runner.go:130] >     },
	I0916 11:15:32.139147 1507580 command_runner.go:130] >     {
	I0916 11:15:32.139155 1507580 command_runner.go:130] >       "id": "7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:15:32.139159 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.139164 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:15:32.139176 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139179 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.139187 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690",
	I0916 11:15:32.139198 1507580 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:15:32.139201 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139205 1507580 command_runner.go:130] >       "size": "67007814",
	I0916 11:15:32.139209 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.139213 1507580 command_runner.go:130] >         "value": "0"
	I0916 11:15:32.139219 1507580 command_runner.go:130] >       },
	I0916 11:15:32.139223 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.139227 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.139231 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.139236 1507580 command_runner.go:130] >     },
	I0916 11:15:32.139240 1507580 command_runner.go:130] >     {
	I0916 11:15:32.139249 1507580 command_runner.go:130] >       "id": "afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:15:32.139252 1507580 command_runner.go:130] >       "repoTags": [
	I0916 11:15:32.139257 1507580 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:15:32.139262 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139266 1507580 command_runner.go:130] >       "repoDigests": [
	I0916 11:15:32.139281 1507580 command_runner.go:130] >         "registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7",
	I0916 11:15:32.139289 1507580 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:15:32.139295 1507580 command_runner.go:130] >       ],
	I0916 11:15:32.139299 1507580 command_runner.go:130] >       "size": "519877",
	I0916 11:15:32.139302 1507580 command_runner.go:130] >       "uid": {
	I0916 11:15:32.139310 1507580 command_runner.go:130] >         "value": "65535"
	I0916 11:15:32.139316 1507580 command_runner.go:130] >       },
	I0916 11:15:32.139319 1507580 command_runner.go:130] >       "username": "",
	I0916 11:15:32.139323 1507580 command_runner.go:130] >       "spec": null,
	I0916 11:15:32.139329 1507580 command_runner.go:130] >       "pinned": false
	I0916 11:15:32.139332 1507580 command_runner.go:130] >     }
	I0916 11:15:32.139335 1507580 command_runner.go:130] >   ]
	I0916 11:15:32.139338 1507580 command_runner.go:130] > }
	I0916 11:15:32.142306 1507580 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:15:32.142327 1507580 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:15:32.142335 1507580 kubeadm.go:934] updating node { 192.168.67.2 8443 v1.31.1 crio true true} ...
	I0916 11:15:32.142440 1507580 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
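kubeadm.go renders the kubelet ExecStart line above from per-node values such as --hostname-override and --node-ip. A small Go sketch of that templating; the flag list is trimmed to the ones visible in the unit, and the helper itself is illustrative:

    package main

    import (
        "fmt"
        "strings"
    )

    // kubeletExecStart renders the ExecStart line from per-node values; the
    // flag list is trimmed to those visible in the unit above.
    func kubeletExecStart(version, hostname, nodeIP string) string {
        flags := []string{
            "--bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf",
            "--config=/var/lib/kubelet/config.yaml",
            "--hostname-override=" + hostname,
            "--kubeconfig=/etc/kubernetes/kubelet.conf",
            "--node-ip=" + nodeIP,
        }
        return fmt.Sprintf("/var/lib/minikube/binaries/%s/kubelet %s", version, strings.Join(flags, " "))
    }

    func main() {
        fmt.Println(kubeletExecStart("v1.31.1", "multinode-654612", "192.168.67.2"))
    }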
	I0916 11:15:32.142525 1507580 ssh_runner.go:195] Run: crio config
	I0916 11:15:32.186943 1507580 command_runner.go:130] > # The CRI-O configuration file specifies all of the available configuration
	I0916 11:15:32.186973 1507580 command_runner.go:130] > # options and command-line flags for the crio(8) OCI Kubernetes Container Runtime
	I0916 11:15:32.186982 1507580 command_runner.go:130] > # daemon, but in a TOML format that can be more easily modified and versioned.
	I0916 11:15:32.186985 1507580 command_runner.go:130] > #
	I0916 11:15:32.186993 1507580 command_runner.go:130] > # Please refer to crio.conf(5) for details of all configuration options.
	I0916 11:15:32.186999 1507580 command_runner.go:130] > # CRI-O supports partial configuration reload during runtime, which can be
	I0916 11:15:32.187009 1507580 command_runner.go:130] > # done by sending SIGHUP to the running process. Currently supported options
	I0916 11:15:32.187020 1507580 command_runner.go:130] > # are explicitly mentioned with: 'This option supports live configuration
	I0916 11:15:32.187026 1507580 command_runner.go:130] > # reload'.
	I0916 11:15:32.187033 1507580 command_runner.go:130] > # CRI-O reads its storage defaults from the containers-storage.conf(5) file
	I0916 11:15:32.187046 1507580 command_runner.go:130] > # located at /etc/containers/storage.conf. Modify this storage configuration if
	I0916 11:15:32.187053 1507580 command_runner.go:130] > # you want to change the system's defaults. If you want to modify storage just
	I0916 11:15:32.187059 1507580 command_runner.go:130] > # for CRI-O, you can change the storage configuration options here.
	I0916 11:15:32.187065 1507580 command_runner.go:130] > [crio]
	I0916 11:15:32.187071 1507580 command_runner.go:130] > # Path to the "root directory". CRI-O stores all of its data, including
	I0916 11:15:32.187076 1507580 command_runner.go:130] > # containers images, in this directory.
	I0916 11:15:32.187085 1507580 command_runner.go:130] > # root = "/home/docker/.local/share/containers/storage"
	I0916 11:15:32.187091 1507580 command_runner.go:130] > # Path to the "run directory". CRI-O stores all of its state in this directory.
	I0916 11:15:32.187102 1507580 command_runner.go:130] > # runroot = "/tmp/containers-user-1000/containers"
	I0916 11:15:32.187109 1507580 command_runner.go:130] > # Storage driver used to manage the storage of images and containers. Please
	I0916 11:15:32.187123 1507580 command_runner.go:130] > # refer to containers-storage.conf(5) to see all available storage drivers.
	I0916 11:15:32.187131 1507580 command_runner.go:130] > # storage_driver = "vfs"
	I0916 11:15:32.187137 1507580 command_runner.go:130] > # List to pass options to the storage driver. Please refer to
	I0916 11:15:32.187143 1507580 command_runner.go:130] > # containers-storage.conf(5) to see all available storage options.
	I0916 11:15:32.187149 1507580 command_runner.go:130] > # storage_option = [
	I0916 11:15:32.187152 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.187159 1507580 command_runner.go:130] > # The default log directory where all logs will go unless directly specified by
	I0916 11:15:32.187167 1507580 command_runner.go:130] > # the kubelet. The log directory specified must be an absolute directory.
	I0916 11:15:32.187171 1507580 command_runner.go:130] > # log_dir = "/var/log/crio/pods"
	I0916 11:15:32.187177 1507580 command_runner.go:130] > # Location for CRI-O to lay down the temporary version file.
	I0916 11:15:32.187187 1507580 command_runner.go:130] > # It is used to check if crio wipe should wipe containers, which should
	I0916 11:15:32.187191 1507580 command_runner.go:130] > # always happen on a node reboot
	I0916 11:15:32.187196 1507580 command_runner.go:130] > # version_file = "/var/run/crio/version"
	I0916 11:15:32.187204 1507580 command_runner.go:130] > # Location for CRI-O to lay down the persistent version file.
	I0916 11:15:32.187210 1507580 command_runner.go:130] > # It is used to check if crio wipe should wipe images, which should
	I0916 11:15:32.187220 1507580 command_runner.go:130] > # only happen when CRI-O has been upgraded
	I0916 11:15:32.187228 1507580 command_runner.go:130] > # version_file_persist = "/var/lib/crio/version"
	I0916 11:15:32.187236 1507580 command_runner.go:130] > # InternalWipe is whether CRI-O should wipe containers and images after a reboot when the server starts.
	I0916 11:15:32.187246 1507580 command_runner.go:130] > # If set to false, one must use the external command 'crio wipe' to wipe the containers and images in these situations.
	I0916 11:15:32.187252 1507580 command_runner.go:130] > # internal_wipe = true
	I0916 11:15:32.187259 1507580 command_runner.go:130] > # Location for CRI-O to lay down the clean shutdown file.
	I0916 11:15:32.187265 1507580 command_runner.go:130] > # It is used to check whether crio had time to sync before shutting down.
	I0916 11:15:32.187273 1507580 command_runner.go:130] > # If not found, crio wipe will clear the storage directory.
	I0916 11:15:32.187279 1507580 command_runner.go:130] > # clean_shutdown_file = "/var/lib/crio/clean.shutdown"
	I0916 11:15:32.187285 1507580 command_runner.go:130] > # The crio.api table contains settings for the kubelet/gRPC interface.
	I0916 11:15:32.187288 1507580 command_runner.go:130] > [crio.api]
	I0916 11:15:32.187294 1507580 command_runner.go:130] > # Path to AF_LOCAL socket on which CRI-O will listen.
	I0916 11:15:32.187301 1507580 command_runner.go:130] > # listen = "/var/run/crio/crio.sock"
	I0916 11:15:32.187306 1507580 command_runner.go:130] > # IP address on which the stream server will listen.
	I0916 11:15:32.187313 1507580 command_runner.go:130] > # stream_address = "127.0.0.1"
	I0916 11:15:32.187320 1507580 command_runner.go:130] > # The port on which the stream server will listen. If the port is set to "0", then
	I0916 11:15:32.187325 1507580 command_runner.go:130] > # CRI-O will allocate a random free port number.
	I0916 11:15:32.187335 1507580 command_runner.go:130] > # stream_port = "0"
	I0916 11:15:32.187340 1507580 command_runner.go:130] > # Enable encrypted TLS transport of the stream server.
	I0916 11:15:32.187345 1507580 command_runner.go:130] > # stream_enable_tls = false
	I0916 11:15:32.187354 1507580 command_runner.go:130] > # Length of time until open streams terminate due to lack of activity
	I0916 11:15:32.187359 1507580 command_runner.go:130] > # stream_idle_timeout = ""
	I0916 11:15:32.187365 1507580 command_runner.go:130] > # Path to the x509 certificate file used to serve the encrypted stream. This
	I0916 11:15:32.187377 1507580 command_runner.go:130] > # file can change, and CRI-O will automatically pick up the changes within 5
	I0916 11:15:32.187381 1507580 command_runner.go:130] > # minutes.
	I0916 11:15:32.187385 1507580 command_runner.go:130] > # stream_tls_cert = ""
	I0916 11:15:32.187393 1507580 command_runner.go:130] > # Path to the key file used to serve the encrypted stream. This file can
	I0916 11:15:32.187399 1507580 command_runner.go:130] > # change and CRI-O will automatically pick up the changes within 5 minutes.
	I0916 11:15:32.187405 1507580 command_runner.go:130] > # stream_tls_key = ""
	I0916 11:15:32.187412 1507580 command_runner.go:130] > # Path to the x509 CA(s) file used to verify and authenticate client
	I0916 11:15:32.187418 1507580 command_runner.go:130] > # communication with the encrypted stream. This file can change and CRI-O will
	I0916 11:15:32.187427 1507580 command_runner.go:130] > # automatically pick up the changes within 5 minutes.
	I0916 11:15:32.187431 1507580 command_runner.go:130] > # stream_tls_ca = ""
	I0916 11:15:32.187439 1507580 command_runner.go:130] > # Maximum grpc send message size in bytes. If not set or <=0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:15:32.187446 1507580 command_runner.go:130] > # grpc_max_send_msg_size = 83886080
	I0916 11:15:32.187453 1507580 command_runner.go:130] > # Maximum grpc receive message size. If not set or <= 0, then CRI-O will default to 16 * 1024 * 1024.
	I0916 11:15:32.187460 1507580 command_runner.go:130] > # grpc_max_recv_msg_size = 83886080
	I0916 11:15:32.187472 1507580 command_runner.go:130] > # The crio.runtime table contains settings pertaining to the OCI runtime used
	I0916 11:15:32.187481 1507580 command_runner.go:130] > # and options for how to set up and manage the OCI runtime.
	I0916 11:15:32.187485 1507580 command_runner.go:130] > [crio.runtime]
	I0916 11:15:32.187491 1507580 command_runner.go:130] > # A list of ulimits to be set in containers by default, specified as
	I0916 11:15:32.187499 1507580 command_runner.go:130] > # "<ulimit name>=<soft limit>:<hard limit>", for example:
	I0916 11:15:32.187503 1507580 command_runner.go:130] > # "nofile=1024:2048"
	I0916 11:15:32.187509 1507580 command_runner.go:130] > # If nothing is set here, settings will be inherited from the CRI-O daemon
	I0916 11:15:32.187516 1507580 command_runner.go:130] > # default_ulimits = [
	I0916 11:15:32.187519 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.187525 1507580 command_runner.go:130] > # If true, the runtime will not use pivot_root, but instead use MS_MOVE.
	I0916 11:15:32.187531 1507580 command_runner.go:130] > # no_pivot = false
	I0916 11:15:32.187537 1507580 command_runner.go:130] > # decryption_keys_path is the path where the keys required for
	I0916 11:15:32.187543 1507580 command_runner.go:130] > # image decryption are stored. This option supports live configuration reload.
	I0916 11:15:32.187550 1507580 command_runner.go:130] > # decryption_keys_path = "/etc/crio/keys/"
	I0916 11:15:32.187556 1507580 command_runner.go:130] > # Path to the conmon binary, used for monitoring the OCI runtime.
	I0916 11:15:32.187561 1507580 command_runner.go:130] > # Will be searched for using $PATH if empty.
	I0916 11:15:32.187572 1507580 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:15:32.187576 1507580 command_runner.go:130] > # conmon = ""
	I0916 11:15:32.187582 1507580 command_runner.go:130] > # Cgroup setting for conmon
	I0916 11:15:32.187590 1507580 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorCgroup.
	I0916 11:15:32.187597 1507580 command_runner.go:130] > conmon_cgroup = "pod"
	I0916 11:15:32.187603 1507580 command_runner.go:130] > # Environment variable list for the conmon process, used for passing necessary
	I0916 11:15:32.187609 1507580 command_runner.go:130] > # environment variables to conmon or the runtime.
	I0916 11:15:32.187618 1507580 command_runner.go:130] > # This option is currently deprecated, and will be replaced with RuntimeHandler.MonitorEnv.
	I0916 11:15:32.187623 1507580 command_runner.go:130] > # conmon_env = [
	I0916 11:15:32.187628 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.187633 1507580 command_runner.go:130] > # Additional environment variables to set for all the
	I0916 11:15:32.187641 1507580 command_runner.go:130] > # containers. These are overridden if set in the
	I0916 11:15:32.187647 1507580 command_runner.go:130] > # container image spec or in the container runtime configuration.
	I0916 11:15:32.187655 1507580 command_runner.go:130] > # default_env = [
	I0916 11:15:32.187658 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.187664 1507580 command_runner.go:130] > # If true, SELinux will be used for pod separation on the host.
	I0916 11:15:32.187669 1507580 command_runner.go:130] > # selinux = false
	I0916 11:15:32.187676 1507580 command_runner.go:130] > # Path to the seccomp.json profile which is used as the default seccomp profile
	I0916 11:15:32.187687 1507580 command_runner.go:130] > # for the runtime. If not specified, then the internal default seccomp profile
	I0916 11:15:32.187693 1507580 command_runner.go:130] > # will be used. This option supports live configuration reload.
	I0916 11:15:32.187699 1507580 command_runner.go:130] > # seccomp_profile = ""
	I0916 11:15:32.187705 1507580 command_runner.go:130] > # Changes the meaning of an empty seccomp profile. By default
	I0916 11:15:32.187711 1507580 command_runner.go:130] > # (and according to CRI spec), an empty profile means unconfined.
	I0916 11:15:32.187720 1507580 command_runner.go:130] > # This option tells CRI-O to treat an empty profile as the default profile,
	I0916 11:15:32.187724 1507580 command_runner.go:130] > # which might increase security.
	I0916 11:15:32.187729 1507580 command_runner.go:130] > # seccomp_use_default_when_empty = true
	I0916 11:15:32.187737 1507580 command_runner.go:130] > # Used to change the name of the default AppArmor profile of CRI-O. The default
	I0916 11:15:32.187744 1507580 command_runner.go:130] > # profile name is "crio-default". This profile only takes effect if the user
	I0916 11:15:32.187753 1507580 command_runner.go:130] > # does not specify a profile via the Kubernetes Pod's metadata annotation. If
	I0916 11:15:32.187759 1507580 command_runner.go:130] > # the profile is set to "unconfined", then this equals to disabling AppArmor.
	I0916 11:15:32.187764 1507580 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:15:32.187772 1507580 command_runner.go:130] > # apparmor_profile = "crio-default"
	I0916 11:15:32.187783 1507580 command_runner.go:130] > # Path to the blockio class configuration file for configuring
	I0916 11:15:32.187788 1507580 command_runner.go:130] > # the cgroup blockio controller.
	I0916 11:15:32.187792 1507580 command_runner.go:130] > # blockio_config_file = ""
	I0916 11:15:32.187801 1507580 command_runner.go:130] > # Used to change irqbalance service config file path which is used for configuring
	I0916 11:15:32.187807 1507580 command_runner.go:130] > # irqbalance daemon.
	I0916 11:15:32.187812 1507580 command_runner.go:130] > # irqbalance_config_file = "/etc/sysconfig/irqbalance"
	I0916 11:15:32.187822 1507580 command_runner.go:130] > # Path to the RDT configuration file for configuring the resctrl pseudo-filesystem.
	I0916 11:15:32.187827 1507580 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:15:32.187836 1507580 command_runner.go:130] > # rdt_config_file = ""
	I0916 11:15:32.187841 1507580 command_runner.go:130] > # Cgroup management implementation used for the runtime.
	I0916 11:15:32.187845 1507580 command_runner.go:130] > cgroup_manager = "cgroupfs"
	I0916 11:15:32.187851 1507580 command_runner.go:130] > # Specify whether the image pull must be performed in a separate cgroup.
	I0916 11:15:32.187861 1507580 command_runner.go:130] > # separate_pull_cgroup = ""
	I0916 11:15:32.187868 1507580 command_runner.go:130] > # List of default capabilities for containers. If it is empty or commented out,
	I0916 11:15:32.187876 1507580 command_runner.go:130] > # only the capabilities defined in the containers json file by the user/kube
	I0916 11:15:32.187883 1507580 command_runner.go:130] > # will be added.
	I0916 11:15:32.187887 1507580 command_runner.go:130] > # default_capabilities = [
	I0916 11:15:32.187890 1507580 command_runner.go:130] > # 	"CHOWN",
	I0916 11:15:32.187894 1507580 command_runner.go:130] > # 	"DAC_OVERRIDE",
	I0916 11:15:32.187900 1507580 command_runner.go:130] > # 	"FSETID",
	I0916 11:15:32.187904 1507580 command_runner.go:130] > # 	"FOWNER",
	I0916 11:15:32.187909 1507580 command_runner.go:130] > # 	"SETGID",
	I0916 11:15:32.187913 1507580 command_runner.go:130] > # 	"SETUID",
	I0916 11:15:32.187918 1507580 command_runner.go:130] > # 	"SETPCAP",
	I0916 11:15:32.187922 1507580 command_runner.go:130] > # 	"NET_BIND_SERVICE",
	I0916 11:15:32.187925 1507580 command_runner.go:130] > # 	"KILL",
	I0916 11:15:32.187928 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.187939 1507580 command_runner.go:130] > # Add capabilities to the inheritable set, as well as the default group of permitted, bounding and effective.
	I0916 11:15:32.187948 1507580 command_runner.go:130] > # If capabilities are expected to work for non-root users, this option should be set.
	I0916 11:15:32.187955 1507580 command_runner.go:130] > # add_inheritable_capabilities = true
	I0916 11:15:32.187962 1507580 command_runner.go:130] > # List of default sysctls. If it is empty or commented out, only the sysctls
	I0916 11:15:32.187978 1507580 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:15:32.187981 1507580 command_runner.go:130] > default_sysctls = [
	I0916 11:15:32.187986 1507580 command_runner.go:130] > 	"net.ipv4.ip_unprivileged_port_start=0",
	I0916 11:15:32.187991 1507580 command_runner.go:130] > ]
	I0916 11:15:32.187997 1507580 command_runner.go:130] > # List of devices on the host that a
	I0916 11:15:32.188004 1507580 command_runner.go:130] > # user can specify with the "io.kubernetes.cri-o.Devices" allowed annotation.
	I0916 11:15:32.188010 1507580 command_runner.go:130] > # allowed_devices = [
	I0916 11:15:32.188013 1507580 command_runner.go:130] > # 	"/dev/fuse",
	I0916 11:15:32.188016 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.188021 1507580 command_runner.go:130] > # List of additional devices, specified as
	I0916 11:15:32.188034 1507580 command_runner.go:130] > # "<device-on-host>:<device-on-container>:<permissions>", for example: "--device=/dev/sdc:/dev/xvdc:rwm".
	I0916 11:15:32.188043 1507580 command_runner.go:130] > # If it is empty or commented out, only the devices
	I0916 11:15:32.188049 1507580 command_runner.go:130] > # defined in the container json file by the user/kube will be added.
	I0916 11:15:32.188053 1507580 command_runner.go:130] > # additional_devices = [
	I0916 11:15:32.188058 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.188063 1507580 command_runner.go:130] > # List of directories to scan for CDI Spec files.
	I0916 11:15:32.188069 1507580 command_runner.go:130] > # cdi_spec_dirs = [
	I0916 11:15:32.188072 1507580 command_runner.go:130] > # 	"/etc/cdi",
	I0916 11:15:32.188076 1507580 command_runner.go:130] > # 	"/var/run/cdi",
	I0916 11:15:32.188079 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.188085 1507580 command_runner.go:130] > # Change the default behavior of setting container devices uid/gid from CRI's
	I0916 11:15:32.188095 1507580 command_runner.go:130] > # SecurityContext (RunAsUser/RunAsGroup) instead of taking host's uid/gid.
	I0916 11:15:32.188099 1507580 command_runner.go:130] > # Defaults to false.
	I0916 11:15:32.188104 1507580 command_runner.go:130] > # device_ownership_from_security_context = false
	I0916 11:15:32.188113 1507580 command_runner.go:130] > # Path to OCI hooks directories for automatically executed hooks. If one of the
	I0916 11:15:32.188119 1507580 command_runner.go:130] > # directories does not exist, then CRI-O will automatically skip them.
	I0916 11:15:32.188125 1507580 command_runner.go:130] > # hooks_dir = [
	I0916 11:15:32.188129 1507580 command_runner.go:130] > # 	"/usr/share/containers/oci/hooks.d",
	I0916 11:15:32.188138 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.188144 1507580 command_runner.go:130] > # Path to the file specifying the defaults mounts for each container. The
	I0916 11:15:32.188156 1507580 command_runner.go:130] > # format of the config is /SRC:/DST, one mount per line. Notice that CRI-O reads
	I0916 11:15:32.188161 1507580 command_runner.go:130] > # its default mounts from the following two files:
	I0916 11:15:32.188165 1507580 command_runner.go:130] > #
	I0916 11:15:32.188172 1507580 command_runner.go:130] > #   1) /etc/containers/mounts.conf (i.e., default_mounts_file): This is the
	I0916 11:15:32.188181 1507580 command_runner.go:130] > #      override file, where users can either add in their own default mounts, or
	I0916 11:15:32.188187 1507580 command_runner.go:130] > #      override the default mounts shipped with the package.
	I0916 11:15:32.188191 1507580 command_runner.go:130] > #
	I0916 11:15:32.188198 1507580 command_runner.go:130] > #   2) /usr/share/containers/mounts.conf: This is the default file read for
	I0916 11:15:32.188204 1507580 command_runner.go:130] > #      mounts. If you want CRI-O to read from a different, specific mounts file,
	I0916 11:15:32.188214 1507580 command_runner.go:130] > #      you can change the default_mounts_file. Note, if this is done, CRI-O will
	I0916 11:15:32.188218 1507580 command_runner.go:130] > #      only add mounts it finds in this file.
	I0916 11:15:32.188221 1507580 command_runner.go:130] > #
	I0916 11:15:32.188225 1507580 command_runner.go:130] > # default_mounts_file = ""
	I0916 11:15:32.188233 1507580 command_runner.go:130] > # Maximum number of processes allowed in a container.
	I0916 11:15:32.188240 1507580 command_runner.go:130] > # This option is deprecated. The Kubelet flag '--pod-pids-limit' should be used instead.
	I0916 11:15:32.188246 1507580 command_runner.go:130] > # pids_limit = 0
	I0916 11:15:32.188252 1507580 command_runner.go:130] > # Maximum size allowed for the container log file. Negative numbers indicate
	I0916 11:15:32.188259 1507580 command_runner.go:130] > # that no size limit is imposed. If it is positive, it must be >= 8192 to
	I0916 11:15:32.188269 1507580 command_runner.go:130] > # match/exceed conmon's read buffer. The file is truncated and re-opened so the
	I0916 11:15:32.188278 1507580 command_runner.go:130] > # limit is never exceeded. This option is deprecated. The Kubelet flag '--container-log-max-size' should be used instead.
	I0916 11:15:32.188284 1507580 command_runner.go:130] > # log_size_max = -1
	I0916 11:15:32.188292 1507580 command_runner.go:130] > # Whether container output should be logged to journald in addition to the kubernetes log file
	I0916 11:15:32.188298 1507580 command_runner.go:130] > # log_to_journald = false
	I0916 11:15:32.188306 1507580 command_runner.go:130] > # Path to directory in which container exit files are written to by conmon.
	I0916 11:15:32.188320 1507580 command_runner.go:130] > # container_exits_dir = "/var/run/crio/exits"
	I0916 11:15:32.188326 1507580 command_runner.go:130] > # Path to directory for container attach sockets.
	I0916 11:15:32.188334 1507580 command_runner.go:130] > # container_attach_socket_dir = "/var/run/crio"
	I0916 11:15:32.188339 1507580 command_runner.go:130] > # The prefix to use for the source of the bind mounts.
	I0916 11:15:32.188344 1507580 command_runner.go:130] > # bind_mount_prefix = ""
	I0916 11:15:32.188351 1507580 command_runner.go:130] > # If set to true, all containers will run in read-only mode.
	I0916 11:15:32.188355 1507580 command_runner.go:130] > # read_only = false
	I0916 11:15:32.188362 1507580 command_runner.go:130] > # Changes the verbosity of the logs based on the level it is set to. Options
	I0916 11:15:32.188370 1507580 command_runner.go:130] > # are fatal, panic, error, warn, info, debug and trace. This option supports
	I0916 11:15:32.188374 1507580 command_runner.go:130] > # live configuration reload.
	I0916 11:15:32.188383 1507580 command_runner.go:130] > # log_level = "info"
	I0916 11:15:32.188392 1507580 command_runner.go:130] > # Filter the log messages by the provided regular expression.
	I0916 11:15:32.188400 1507580 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:15:32.188403 1507580 command_runner.go:130] > # log_filter = ""
	I0916 11:15:32.188410 1507580 command_runner.go:130] > # The UID mappings for the user namespace of each container. A range is
	I0916 11:15:32.188419 1507580 command_runner.go:130] > # specified in the form containerUID:HostUID:Size. Multiple ranges must be
	I0916 11:15:32.188423 1507580 command_runner.go:130] > # separated by comma.
	I0916 11:15:32.188427 1507580 command_runner.go:130] > # uid_mappings = ""
	I0916 11:15:32.188434 1507580 command_runner.go:130] > # The GID mappings for the user namespace of each container. A range is
	I0916 11:15:32.188443 1507580 command_runner.go:130] > # specified in the form containerGID:HostGID:Size. Multiple ranges must be
	I0916 11:15:32.188447 1507580 command_runner.go:130] > # separated by comma.
	I0916 11:15:32.188450 1507580 command_runner.go:130] > # gid_mappings = ""
	I0916 11:15:32.188457 1507580 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host UIDs below this value
	I0916 11:15:32.188465 1507580 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:15:32.188471 1507580 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:15:32.188477 1507580 command_runner.go:130] > # minimum_mappable_uid = -1
	I0916 11:15:32.188483 1507580 command_runner.go:130] > # If set, CRI-O will reject any attempt to map host GIDs below this value
	I0916 11:15:32.188492 1507580 command_runner.go:130] > # into user namespaces.  A negative value indicates that no minimum is set,
	I0916 11:15:32.188498 1507580 command_runner.go:130] > # so specifying mappings will only be allowed for pods that run as UID 0.
	I0916 11:15:32.188504 1507580 command_runner.go:130] > # minimum_mappable_gid = -1
	I0916 11:15:32.188510 1507580 command_runner.go:130] > # The minimal amount of time in seconds to wait before issuing a timeout
	I0916 11:15:32.188519 1507580 command_runner.go:130] > # regarding the proper termination of the container. The lowest possible
	I0916 11:15:32.188525 1507580 command_runner.go:130] > # value is 30s, whereas lower values are not considered by CRI-O.
	I0916 11:15:32.188533 1507580 command_runner.go:130] > # ctr_stop_timeout = 30
	I0916 11:15:32.188542 1507580 command_runner.go:130] > # drop_infra_ctr determines whether CRI-O drops the infra container
	I0916 11:15:32.188549 1507580 command_runner.go:130] > # when a pod does not have a private PID namespace, and does not use
	I0916 11:15:32.188556 1507580 command_runner.go:130] > # a kernel separating runtime (like kata).
	I0916 11:15:32.188561 1507580 command_runner.go:130] > # It requires manage_ns_lifecycle to be true.
	I0916 11:15:32.188567 1507580 command_runner.go:130] > # drop_infra_ctr = true
	I0916 11:15:32.188574 1507580 command_runner.go:130] > # infra_ctr_cpuset determines what CPUs will be used to run infra containers.
	I0916 11:15:32.188582 1507580 command_runner.go:130] > # You can use linux CPU list format to specify desired CPUs.
	I0916 11:15:32.188590 1507580 command_runner.go:130] > # To get better isolation for guaranteed pods, set this parameter to be equal to kubelet reserved-cpus.
	I0916 11:15:32.188596 1507580 command_runner.go:130] > # infra_ctr_cpuset = ""
	I0916 11:15:32.188602 1507580 command_runner.go:130] > # The directory where the state of the managed namespaces gets tracked.
	I0916 11:15:32.188607 1507580 command_runner.go:130] > # Only used when manage_ns_lifecycle is true.
	I0916 11:15:32.188613 1507580 command_runner.go:130] > # namespaces_dir = "/var/run"
	I0916 11:15:32.188620 1507580 command_runner.go:130] > # pinns_path is the path to find the pinns binary, which is needed to manage namespace lifecycle
	I0916 11:15:32.188624 1507580 command_runner.go:130] > # pinns_path = ""
	I0916 11:15:32.188633 1507580 command_runner.go:130] > # default_runtime is the _name_ of the OCI runtime to be used as the default.
	I0916 11:15:32.188642 1507580 command_runner.go:130] > # The name is matched against the runtimes map below. If this value is changed,
	I0916 11:15:32.188651 1507580 command_runner.go:130] > # the corresponding existing entry from the runtimes map below will be ignored.
	I0916 11:15:32.188655 1507580 command_runner.go:130] > # default_runtime = "runc"
	I0916 11:15:32.188660 1507580 command_runner.go:130] > # A list of paths that, when absent from the host,
	I0916 11:15:32.188670 1507580 command_runner.go:130] > # will cause a container creation to fail (as opposed to the current behavior of being created as a directory).
	I0916 11:15:32.188740 1507580 command_runner.go:130] > # This option is to protect from source locations whose existence as a directory could jeopardize the health of the node, and whose
	I0916 11:15:32.188750 1507580 command_runner.go:130] > # creation as a file is not desired either.
	I0916 11:15:32.188759 1507580 command_runner.go:130] > # An example is /etc/hostname, which will cause failures on reboot if it's created as a directory, but often doesn't exist because
	I0916 11:15:32.188767 1507580 command_runner.go:130] > # the hostname is being managed dynamically.
	I0916 11:15:32.188772 1507580 command_runner.go:130] > # absent_mount_sources_to_reject = [
	I0916 11:15:32.188781 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.188796 1507580 command_runner.go:130] > # The "crio.runtime.runtimes" table defines a list of OCI compatible runtimes.
	I0916 11:15:32.188806 1507580 command_runner.go:130] > # The runtime to use is picked based on the runtime handler provided by the CRI.
	I0916 11:15:32.188813 1507580 command_runner.go:130] > # If no runtime handler is provided, the runtime will be picked based on the level
	I0916 11:15:32.188821 1507580 command_runner.go:130] > # of trust of the workload. Each entry in the table should follow the format:
	I0916 11:15:32.188824 1507580 command_runner.go:130] > #
	I0916 11:15:32.188828 1507580 command_runner.go:130] > #[crio.runtime.runtimes.runtime-handler]
	I0916 11:15:32.188837 1507580 command_runner.go:130] > #  runtime_path = "/path/to/the/executable"
	I0916 11:15:32.188841 1507580 command_runner.go:130] > #  runtime_type = "oci"
	I0916 11:15:32.188846 1507580 command_runner.go:130] > #  runtime_root = "/path/to/the/root"
	I0916 11:15:32.188851 1507580 command_runner.go:130] > #  privileged_without_host_devices = false
	I0916 11:15:32.188860 1507580 command_runner.go:130] > #  allowed_annotations = []
	I0916 11:15:32.188869 1507580 command_runner.go:130] > # Where:
	I0916 11:15:32.188877 1507580 command_runner.go:130] > # - runtime-handler: name used to identify the runtime
	I0916 11:15:32.188883 1507580 command_runner.go:130] > # - runtime_path (optional, string): absolute path to the runtime executable in
	I0916 11:15:32.188895 1507580 command_runner.go:130] > #   the host filesystem. If omitted, the runtime-handler identifier should match
	I0916 11:15:32.188901 1507580 command_runner.go:130] > #   the runtime executable name, and the runtime executable should be placed
	I0916 11:15:32.188907 1507580 command_runner.go:130] > #   in $PATH.
	I0916 11:15:32.188913 1507580 command_runner.go:130] > # - runtime_type (optional, string): type of runtime, one of: "oci", "vm". If
	I0916 11:15:32.188918 1507580 command_runner.go:130] > #   omitted, an "oci" runtime is assumed.
	I0916 11:15:32.188927 1507580 command_runner.go:130] > # - runtime_root (optional, string): root directory for storage of containers
	I0916 11:15:32.188930 1507580 command_runner.go:130] > #   state.
	I0916 11:15:32.188937 1507580 command_runner.go:130] > # - runtime_config_path (optional, string): the path for the runtime configuration
	I0916 11:15:32.188953 1507580 command_runner.go:130] > #   file. This can only be used when using the VM runtime_type.
	I0916 11:15:32.188959 1507580 command_runner.go:130] > # - privileged_without_host_devices (optional, bool): an option for restricting
	I0916 11:15:32.188967 1507580 command_runner.go:130] > #   host devices from being passed to privileged containers.
	I0916 11:15:32.188977 1507580 command_runner.go:130] > # - allowed_annotations (optional, array of strings): an option for specifying
	I0916 11:15:32.188987 1507580 command_runner.go:130] > #   a list of experimental annotations that this runtime handler is allowed to process.
	I0916 11:15:32.188992 1507580 command_runner.go:130] > #   The currently recognized values are:
	I0916 11:15:32.189001 1507580 command_runner.go:130] > #   "io.kubernetes.cri-o.userns-mode" for configuring a user namespace for the pod.
	I0916 11:15:32.189008 1507580 command_runner.go:130] > #   "io.kubernetes.cri-o.cgroup2-mount-hierarchy-rw" for mounting cgroups writably when set to "true".
	I0916 11:15:32.189023 1507580 command_runner.go:130] > #   "io.kubernetes.cri-o.Devices" for configuring devices for the pod.
	I0916 11:15:32.189029 1507580 command_runner.go:130] > #   "io.kubernetes.cri-o.ShmSize" for configuring the size of /dev/shm.
	I0916 11:15:32.189039 1507580 command_runner.go:130] > #   "io.kubernetes.cri-o.UnifiedCgroup.$CTR_NAME" for configuring the cgroup v2 unified block for a container.
	I0916 11:15:32.189048 1507580 command_runner.go:130] > #   "io.containers.trace-syscall" for tracing syscalls via the OCI seccomp BPF hook.
	I0916 11:15:32.189054 1507580 command_runner.go:130] > #   "io.kubernetes.cri.rdt-class" for setting the RDT class of a container
	I0916 11:15:32.189066 1507580 command_runner.go:130] > # - monitor_exec_cgroup (optional, string): if set to "container", indicates exec probes
	I0916 11:15:32.189071 1507580 command_runner.go:130] > #   should be moved to the container's cgroup
	I0916 11:15:32.189077 1507580 command_runner.go:130] > [crio.runtime.runtimes.runc]
	I0916 11:15:32.189082 1507580 command_runner.go:130] > runtime_path = "/usr/lib/cri-o-runc/sbin/runc"
	I0916 11:15:32.189094 1507580 command_runner.go:130] > runtime_type = "oci"
	I0916 11:15:32.189099 1507580 command_runner.go:130] > runtime_root = "/run/runc"
	I0916 11:15:32.189108 1507580 command_runner.go:130] > runtime_config_path = ""
	I0916 11:15:32.189111 1507580 command_runner.go:130] > monitor_path = ""
	I0916 11:15:32.189115 1507580 command_runner.go:130] > monitor_cgroup = ""
	I0916 11:15:32.189119 1507580 command_runner.go:130] > monitor_exec_cgroup = ""
	I0916 11:15:32.189142 1507580 command_runner.go:130] > # crun is a fast and lightweight fully featured OCI runtime and C library for
	I0916 11:15:32.189149 1507580 command_runner.go:130] > # running containers
	I0916 11:15:32.189153 1507580 command_runner.go:130] > #[crio.runtime.runtimes.crun]
	I0916 11:15:32.189160 1507580 command_runner.go:130] > # Kata Containers is an OCI runtime, where containers are run inside lightweight
	I0916 11:15:32.189176 1507580 command_runner.go:130] > # VMs. Kata provides additional isolation towards the host, minimizing the host attack
	I0916 11:15:32.189182 1507580 command_runner.go:130] > # surface and mitigating the consequences of containers breakout.
	I0916 11:15:32.189190 1507580 command_runner.go:130] > # Kata Containers with the default configured VMM
	I0916 11:15:32.189194 1507580 command_runner.go:130] > #[crio.runtime.runtimes.kata-runtime]
	I0916 11:15:32.189198 1507580 command_runner.go:130] > # Kata Containers with the QEMU VMM
	I0916 11:15:32.189203 1507580 command_runner.go:130] > #[crio.runtime.runtimes.kata-qemu]
	I0916 11:15:32.189207 1507580 command_runner.go:130] > # Kata Containers with the Firecracker VMM
	I0916 11:15:32.189212 1507580 command_runner.go:130] > #[crio.runtime.runtimes.kata-fc]
	I0916 11:15:32.189224 1507580 command_runner.go:130] > # The workloads table defines ways to customize containers with different resources
	I0916 11:15:32.189229 1507580 command_runner.go:130] > # that work based on annotations, rather than the CRI.
	I0916 11:15:32.189243 1507580 command_runner.go:130] > # Note, the behavior of this table is EXPERIMENTAL and may change at any time.
	I0916 11:15:32.189255 1507580 command_runner.go:130] > # Each workload has a name, activation_annotation, annotation_prefix and set of resources it supports mutating.
	I0916 11:15:32.189266 1507580 command_runner.go:130] > # The currently supported resources are "cpu" (to configure the cpu shares) and "cpuset" to configure the cpuset.
	I0916 11:15:32.189275 1507580 command_runner.go:130] > # Each resource can have a default value specified, or be empty.
	I0916 11:15:32.189287 1507580 command_runner.go:130] > # For a container to opt-into this workload, the pod should be configured with the annotation $activation_annotation (key only, value is ignored).
	I0916 11:15:32.189295 1507580 command_runner.go:130] > # To customize per-container, an annotation of the form $annotation_prefix.$resource/$ctrName = "value" can be specified
	I0916 11:15:32.189304 1507580 command_runner.go:130] > # signifying for that resource type to override the default value.
	I0916 11:15:32.189318 1507580 command_runner.go:130] > # If the annotation_prefix is not present, every container in the pod will be given the default values.
	I0916 11:15:32.189325 1507580 command_runner.go:130] > # Example:
	I0916 11:15:32.189331 1507580 command_runner.go:130] > # [crio.runtime.workloads.workload-type]
	I0916 11:15:32.189336 1507580 command_runner.go:130] > # activation_annotation = "io.crio/workload"
	I0916 11:15:32.189343 1507580 command_runner.go:130] > # annotation_prefix = "io.crio.workload-type"
	I0916 11:15:32.189348 1507580 command_runner.go:130] > # [crio.runtime.workloads.workload-type.resources]
	I0916 11:15:32.189355 1507580 command_runner.go:130] > # cpuset = 0
	I0916 11:15:32.189361 1507580 command_runner.go:130] > # cpushares = "0-1"
	I0916 11:15:32.189365 1507580 command_runner.go:130] > # Where:
	I0916 11:15:32.189369 1507580 command_runner.go:130] > # The workload name is workload-type.
	I0916 11:15:32.189376 1507580 command_runner.go:130] > # To specify, the pod must have the "io.crio.workload" annotation (this is a precise string match).
	I0916 11:15:32.189382 1507580 command_runner.go:130] > # This workload supports setting cpuset and cpu resources.
	I0916 11:15:32.189396 1507580 command_runner.go:130] > # annotation_prefix is used to customize the different resources.
	I0916 11:15:32.189406 1507580 command_runner.go:130] > # To configure the cpu shares a container gets in the example above, the pod would have to have the following annotation:
	I0916 11:15:32.189415 1507580 command_runner.go:130] > # "io.crio.workload-type/$container_name = {"cpushares": "value"}"
	I0916 11:15:32.189417 1507580 command_runner.go:130] > # 
	I0916 11:15:32.189424 1507580 command_runner.go:130] > # The crio.image table contains settings pertaining to the management of OCI images.
	I0916 11:15:32.189429 1507580 command_runner.go:130] > #
	I0916 11:15:32.189435 1507580 command_runner.go:130] > # CRI-O reads its configured registries defaults from the system wide
	I0916 11:15:32.189442 1507580 command_runner.go:130] > # containers-registries.conf(5) located in /etc/containers/registries.conf. If
	I0916 11:15:32.189451 1507580 command_runner.go:130] > # you want to modify just CRI-O, you can change the registries configuration in
	I0916 11:15:32.189458 1507580 command_runner.go:130] > # this file. Otherwise, leave insecure_registries and registries commented out to
	I0916 11:15:32.189470 1507580 command_runner.go:130] > # use the system's defaults from /etc/containers/registries.conf.
	I0916 11:15:32.189476 1507580 command_runner.go:130] > [crio.image]
	I0916 11:15:32.189482 1507580 command_runner.go:130] > # Default transport for pulling images from a remote container storage.
	I0916 11:15:32.189486 1507580 command_runner.go:130] > # default_transport = "docker://"
	I0916 11:15:32.189495 1507580 command_runner.go:130] > # The path to a file containing credentials necessary for pulling images from
	I0916 11:15:32.189501 1507580 command_runner.go:130] > # secure registries. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:15:32.189507 1507580 command_runner.go:130] > # global_auth_file = ""
	I0916 11:15:32.189512 1507580 command_runner.go:130] > # The image used to instantiate infra containers.
	I0916 11:15:32.189521 1507580 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:15:32.189526 1507580 command_runner.go:130] > pause_image = "registry.k8s.io/pause:3.10"
	I0916 11:15:32.189544 1507580 command_runner.go:130] > # The path to a file containing credentials specific for pulling the pause_image from
	I0916 11:15:32.189550 1507580 command_runner.go:130] > # above. The file is similar to that of /var/lib/kubelet/config.json
	I0916 11:15:32.189555 1507580 command_runner.go:130] > # This option supports live configuration reload.
	I0916 11:15:32.189562 1507580 command_runner.go:130] > # pause_image_auth_file = ""
	I0916 11:15:32.189567 1507580 command_runner.go:130] > # The command to run to have a container stay in the paused state.
	I0916 11:15:32.189574 1507580 command_runner.go:130] > # When explicitly set to "", it will fall back to the entrypoint and command
	I0916 11:15:32.189582 1507580 command_runner.go:130] > # specified in the pause image. When commented out, it will fall back to the
	I0916 11:15:32.189592 1507580 command_runner.go:130] > # default: "/pause". This option supports live configuration reload.
	I0916 11:15:32.189600 1507580 command_runner.go:130] > # pause_command = "/pause"
	I0916 11:15:32.189606 1507580 command_runner.go:130] > # Path to the file which decides what sort of policy we use when deciding
	I0916 11:15:32.189619 1507580 command_runner.go:130] > # whether or not to trust an image that we've pulled. It is not recommended that
	I0916 11:15:32.189628 1507580 command_runner.go:130] > # this option be used, as the default behavior of using the system-wide default
	I0916 11:15:32.189635 1507580 command_runner.go:130] > # policy (i.e., /etc/containers/policy.json) is most often preferred. Please
	I0916 11:15:32.189640 1507580 command_runner.go:130] > # refer to containers-policy.json(5) for more details.
	I0916 11:15:32.189644 1507580 command_runner.go:130] > # signature_policy = ""
	I0916 11:15:32.189653 1507580 command_runner.go:130] > # List of registries to skip TLS verification for pulling images. Please
	I0916 11:15:32.189665 1507580 command_runner.go:130] > # consider configuring the registries via /etc/containers/registries.conf before
	I0916 11:15:32.189668 1507580 command_runner.go:130] > # changing them here.
	I0916 11:15:32.189672 1507580 command_runner.go:130] > # insecure_registries = [
	I0916 11:15:32.189677 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.189690 1507580 command_runner.go:130] > # Controls how image volumes are handled. The valid values are mkdir, bind and
	I0916 11:15:32.189700 1507580 command_runner.go:130] > # ignore; the last of these will ignore volumes entirely.
	I0916 11:15:32.189704 1507580 command_runner.go:130] > # image_volumes = "mkdir"
	I0916 11:15:32.189709 1507580 command_runner.go:130] > # Temporary directory to use for storing big files
	I0916 11:15:32.189713 1507580 command_runner.go:130] > # big_files_temporary_dir = ""
	I0916 11:15:32.189720 1507580 command_runner.go:130] > # The crio.network table contains settings pertaining to the management of
	I0916 11:15:32.189723 1507580 command_runner.go:130] > # CNI plugins.
	I0916 11:15:32.189726 1507580 command_runner.go:130] > [crio.network]
	I0916 11:15:32.189732 1507580 command_runner.go:130] > # The default CNI network name to be selected. If not set or "", then
	I0916 11:15:32.189737 1507580 command_runner.go:130] > # CRI-O will pick up the first one found in network_dir.
	I0916 11:15:32.189741 1507580 command_runner.go:130] > # cni_default_network = ""
	I0916 11:15:32.189747 1507580 command_runner.go:130] > # Path to the directory where CNI configuration files are located.
	I0916 11:15:32.189751 1507580 command_runner.go:130] > # network_dir = "/etc/cni/net.d/"
	I0916 11:15:32.189757 1507580 command_runner.go:130] > # Paths to directories where CNI plugin binaries are located.
	I0916 11:15:32.189766 1507580 command_runner.go:130] > # plugin_dirs = [
	I0916 11:15:32.189770 1507580 command_runner.go:130] > # 	"/opt/cni/bin/",
	I0916 11:15:32.189773 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.189785 1507580 command_runner.go:130] > # A necessary configuration for Prometheus based metrics retrieval
	I0916 11:15:32.189791 1507580 command_runner.go:130] > [crio.metrics]
	I0916 11:15:32.189797 1507580 command_runner.go:130] > # Globally enable or disable metrics support.
	I0916 11:15:32.189807 1507580 command_runner.go:130] > # enable_metrics = false
	I0916 11:15:32.189812 1507580 command_runner.go:130] > # Specify enabled metrics collectors.
	I0916 11:15:32.189816 1507580 command_runner.go:130] > # Per default all metrics are enabled.
	I0916 11:15:32.189825 1507580 command_runner.go:130] > # It is possible to prefix the metrics with "container_runtime_" and "crio_".
	I0916 11:15:32.189831 1507580 command_runner.go:130] > # For example, the metrics collector "operations" would be treated in the same
	I0916 11:15:32.189843 1507580 command_runner.go:130] > # way as "crio_operations" and "container_runtime_crio_operations".
	I0916 11:15:32.189849 1507580 command_runner.go:130] > # metrics_collectors = [
	I0916 11:15:32.189853 1507580 command_runner.go:130] > # 	"operations",
	I0916 11:15:32.189863 1507580 command_runner.go:130] > # 	"operations_latency_microseconds_total",
	I0916 11:15:32.189868 1507580 command_runner.go:130] > # 	"operations_latency_microseconds",
	I0916 11:15:32.189872 1507580 command_runner.go:130] > # 	"operations_errors",
	I0916 11:15:32.189878 1507580 command_runner.go:130] > # 	"image_pulls_by_digest",
	I0916 11:15:32.189882 1507580 command_runner.go:130] > # 	"image_pulls_by_name",
	I0916 11:15:32.189892 1507580 command_runner.go:130] > # 	"image_pulls_by_name_skipped",
	I0916 11:15:32.189896 1507580 command_runner.go:130] > # 	"image_pulls_failures",
	I0916 11:15:32.189900 1507580 command_runner.go:130] > # 	"image_pulls_successes",
	I0916 11:15:32.189906 1507580 command_runner.go:130] > # 	"image_pulls_layer_size",
	I0916 11:15:32.189918 1507580 command_runner.go:130] > # 	"image_layer_reuse",
	I0916 11:15:32.189922 1507580 command_runner.go:130] > # 	"containers_oom_total",
	I0916 11:15:32.189926 1507580 command_runner.go:130] > # 	"containers_oom",
	I0916 11:15:32.189930 1507580 command_runner.go:130] > # 	"processes_defunct",
	I0916 11:15:32.189942 1507580 command_runner.go:130] > # 	"operations_total",
	I0916 11:15:32.189946 1507580 command_runner.go:130] > # 	"operations_latency_seconds",
	I0916 11:15:32.189950 1507580 command_runner.go:130] > # 	"operations_latency_seconds_total",
	I0916 11:15:32.189957 1507580 command_runner.go:130] > # 	"operations_errors_total",
	I0916 11:15:32.189961 1507580 command_runner.go:130] > # 	"image_pulls_bytes_total",
	I0916 11:15:32.189968 1507580 command_runner.go:130] > # 	"image_pulls_skipped_bytes_total",
	I0916 11:15:32.189972 1507580 command_runner.go:130] > # 	"image_pulls_failure_total",
	I0916 11:15:32.189976 1507580 command_runner.go:130] > # 	"image_pulls_success_total",
	I0916 11:15:32.189988 1507580 command_runner.go:130] > # 	"image_layer_reuse_total",
	I0916 11:15:32.189996 1507580 command_runner.go:130] > # 	"containers_oom_count_total",
	I0916 11:15:32.189999 1507580 command_runner.go:130] > # ]
	I0916 11:15:32.190004 1507580 command_runner.go:130] > # The port on which the metrics server will listen.
	I0916 11:15:32.190012 1507580 command_runner.go:130] > # metrics_port = 9090
	I0916 11:15:32.190017 1507580 command_runner.go:130] > # Local socket path to bind the metrics server to
	I0916 11:15:32.190023 1507580 command_runner.go:130] > # metrics_socket = ""
	I0916 11:15:32.190028 1507580 command_runner.go:130] > # The certificate for the secure metrics server.
	I0916 11:15:32.190038 1507580 command_runner.go:130] > # If the certificate is not available on disk, then CRI-O will generate a
	I0916 11:15:32.190049 1507580 command_runner.go:130] > # self-signed one. CRI-O also watches for changes of this path and reloads the
	I0916 11:15:32.190053 1507580 command_runner.go:130] > # certificate on any modification event.
	I0916 11:15:32.190064 1507580 command_runner.go:130] > # metrics_cert = ""
	I0916 11:15:32.190072 1507580 command_runner.go:130] > # The certificate key for the secure metrics server.
	I0916 11:15:32.190080 1507580 command_runner.go:130] > # Behaves in the same way as the metrics_cert.
	I0916 11:15:32.190087 1507580 command_runner.go:130] > # metrics_key = ""
	I0916 11:15:32.190093 1507580 command_runner.go:130] > # A necessary configuration for OpenTelemetry trace data exporting
	I0916 11:15:32.190096 1507580 command_runner.go:130] > [crio.tracing]
	I0916 11:15:32.190102 1507580 command_runner.go:130] > # Globally enable or disable exporting OpenTelemetry traces.
	I0916 11:15:32.190107 1507580 command_runner.go:130] > # enable_tracing = false
	I0916 11:15:32.190114 1507580 command_runner.go:130] > # Address on which the gRPC trace collector listens.
	I0916 11:15:32.190119 1507580 command_runner.go:130] > # tracing_endpoint = "0.0.0.0:4317"
	I0916 11:15:32.190126 1507580 command_runner.go:130] > # Number of samples to collect per million spans.
	I0916 11:15:32.190136 1507580 command_runner.go:130] > # tracing_sampling_rate_per_million = 0
	I0916 11:15:32.190147 1507580 command_runner.go:130] > # Necessary information pertaining to container and pod stats reporting.
	I0916 11:15:32.190150 1507580 command_runner.go:130] > [crio.stats]
	I0916 11:15:32.190156 1507580 command_runner.go:130] > # The number of seconds between collecting pod and container stats.
	I0916 11:15:32.190164 1507580 command_runner.go:130] > # If set to 0, the stats are collected on-demand instead.
	I0916 11:15:32.190167 1507580 command_runner.go:130] > # stats_collection_period = 0
	I0916 11:15:32.192034 1507580 command_runner.go:130] ! time="2024-09-16 11:15:32.182868750Z" level=info msg="Starting CRI-O, version: 1.24.6, git: 4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90(clean)"
	I0916 11:15:32.192062 1507580 command_runner.go:130] ! level=info msg="Using default capabilities: CAP_CHOWN, CAP_DAC_OVERRIDE, CAP_FSETID, CAP_FOWNER, CAP_SETGID, CAP_SETUID, CAP_SETPCAP, CAP_NET_BIND_SERVICE, CAP_KILL"
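	Aside: the TOML dump above is what the harness relies on to confirm that the runtime's cgroup_manager matches the kubelet's cgroupDriver (both resolve to cgroupfs in this run). Below is a minimal sketch of such a consistency check, assuming the github.com/BurntSushi/toml package and a locally readable config file; minikube itself just shells out to `crio config` over SSH as shown above, so this is illustrative, not its actual code.
	package main
	
	import (
		"fmt"
		"log"
	
		"github.com/BurntSushi/toml" // assumed dependency for this sketch
	)
	
	// Only the keys from the [crio.runtime] table that the check cares about.
	type crioConfig struct {
		Crio struct {
			Runtime struct {
				CgroupManager string `toml:"cgroup_manager"`
				ConmonCgroup  string `toml:"conmon_cgroup"`
			} `toml:"runtime"`
		} `toml:"crio"`
	}
	
	func main() {
		var cfg crioConfig
		// Hypothetical local path; in the log the TOML comes from `crio config`.
		if _, err := toml.DecodeFile("/etc/crio/crio.conf", &cfg); err != nil {
			log.Fatal(err)
		}
		if cfg.Crio.Runtime.CgroupManager != "cgroupfs" {
			log.Fatalf("cgroup_manager %q does not match kubelet cgroupDriver cgroupfs", cfg.Crio.Runtime.CgroupManager)
		}
		fmt.Println("runtime and kubelet agree on cgroupfs")
	}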
	I0916 11:15:32.192324 1507580 cni.go:84] Creating CNI manager for ""
	I0916 11:15:32.192360 1507580 cni.go:136] multinode detected (2 nodes found), recommending kindnet
	I0916 11:15:32.192372 1507580 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:15:32.192397 1507580 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.67.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-654612 NodeName:multinode-654612 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.67.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.67.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:15:32.192563 1507580 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.67.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "multinode-654612"
	  kubeletExtraArgs:
	    node-ip: 192.168.67.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.67.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
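	Aside: the YAML above is rendered from the kubeadm options struct logged at 11:15:32.192397 and is what lands in /var/tmp/minikube/kubeadm.yaml.new a few lines below. A minimal sketch of that rendering step with the standard library's text/template follows; the template text and field names here are illustrative, not minikube's actual template.
	package main
	
	import (
		"os"
		"text/template"
	)
	
	// Illustrative subset of the kubeadm options logged above.
	type kubeadmParams struct {
		AdvertiseAddress  string
		APIServerPort     int
		KubernetesVersion string
		PodSubnet         string
		ServiceCIDR       string
		NodeName          string
	}
	
	const tmpl = `apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: {{.AdvertiseAddress}}
	  bindPort: {{.APIServerPort}}
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "{{.NodeName}}"
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	kubernetesVersion: {{.KubernetesVersion}}
	networking:
	  podSubnet: "{{.PodSubnet}}"
	  serviceSubnet: {{.ServiceCIDR}}
	`
	
	func main() {
		p := kubeadmParams{
			AdvertiseAddress:  "192.168.67.2",
			APIServerPort:     8443,
			KubernetesVersion: "v1.31.1",
			PodSubnet:         "10.244.0.0/16",
			ServiceCIDR:       "10.96.0.0/12",
			NodeName:          "multinode-654612",
		}
		// The rendered YAML is what gets copied to the node as kubeadm.yaml.new.
		if err := template.Must(template.New("kubeadm").Parse(tmpl)).Execute(os.Stdout, p); err != nil {
			panic(err)
		}
	}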
	
	I0916 11:15:32.192650 1507580 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:15:32.201653 1507580 command_runner.go:130] > kubeadm
	I0916 11:15:32.201674 1507580 command_runner.go:130] > kubectl
	I0916 11:15:32.201689 1507580 command_runner.go:130] > kubelet
	I0916 11:15:32.201708 1507580 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:15:32.201770 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:15:32.210360 1507580 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (366 bytes)
	I0916 11:15:32.229759 1507580 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:15:32.249135 1507580 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2154 bytes)
	I0916 11:15:32.267915 1507580 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:15:32.271553 1507580 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
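	Aside: the one-liner above makes the control-plane.minikube.internal mapping idempotent: filter out any stale line ending in the hostname, append the current IP, and copy the result back over /etc/hosts. A sketch of the same pattern in Go, with the path and hostname hard-coded for illustration (the real step runs remotely under sudo):
	package main
	
	import (
		"log"
		"os"
		"strings"
	)
	
	// ensureHostsEntry rewrites hostsPath so exactly one line maps host to ip.
	func ensureHostsEntry(hostsPath, ip, host string) error {
		data, err := os.ReadFile(hostsPath)
		if err != nil {
			return err
		}
		var kept []string
		for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
			// Mirrors `grep -v $'\t<host>$'`: drop any existing mapping for the name.
			if strings.HasSuffix(line, "\t"+host) {
				continue
			}
			kept = append(kept, line)
		}
		kept = append(kept, ip+"\t"+host)
		// The logged command stages through /tmp and `cp`s back: in a container
		// /etc/hosts is typically a bind mount that a rename could not replace.
		// Writing in place has the same effect for this sketch.
		return os.WriteFile(hostsPath, []byte(strings.Join(kept, "\n")+"\n"), 0644)
	}
	
	func main() {
		if err := ensureHostsEntry("/etc/hosts", "192.168.67.2", "control-plane.minikube.internal"); err != nil {
			log.Fatal(err)
		}
	}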
	I0916 11:15:32.283230 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:32.372215 1507580 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:15:32.386107 1507580 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.2
	I0916 11:15:32.386131 1507580 certs.go:194] generating shared ca certs ...
	I0916 11:15:32.386148 1507580 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:15:32.386288 1507580 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:15:32.386336 1507580 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:15:32.386348 1507580 certs.go:256] generating profile certs ...
	I0916 11:15:32.386433 1507580 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key
	I0916 11:15:32.386485 1507580 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key.51b1752e
	I0916 11:15:32.386537 1507580 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key
	I0916 11:15:32.386549 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:15:32.386562 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:15:32.386580 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:15:32.386595 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:15:32.386607 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:15:32.386623 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:15:32.386635 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:15:32.386646 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:15:32.386738 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:15:32.386772 1507580 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:15:32.386784 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:15:32.386809 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:15:32.386842 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:15:32.386869 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:15:32.386918 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:15:32.386951 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:15:32.386965 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:15:32.386978 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:32.387563 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:15:32.417015 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:15:32.445948 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:15:32.507118 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:15:32.544824 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:15:32.573415 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:15:32.598612 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:15:32.624695 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:15:32.649773 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:15:32.677104 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:15:32.704483 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:15:32.729801 1507580 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:15:32.749777 1507580 ssh_runner.go:195] Run: openssl version
	I0916 11:15:32.755526 1507580 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:15:32.755966 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:15:32.767089 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:15:32.771125 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:15:32.771163 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:15:32.771219 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:15:32.778102 1507580 command_runner.go:130] > 51391683
	I0916 11:15:32.778544 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:15:32.788214 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:15:32.798380 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:15:32.802784 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:15:32.802851 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:15:32.802926 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:15:32.810964 1507580 command_runner.go:130] > 3ec20f2e
	I0916 11:15:32.811428 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:15:32.821712 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:15:32.831713 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:32.835466 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:32.835495 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:32.835570 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:32.842332 1507580 command_runner.go:130] > b5213941
	I0916 11:15:32.842803 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
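A note on the three openssl/ln exchanges above: they implement OpenSSL's hashed-directory convention, in which each trusted certificate under /etc/ssl/certs is reachable through a symlink named after the first eight hex digits of its subject hash (here 51391683, 3ec20f2e and b5213941) plus a ".0" suffix. The Go sketch below reproduces the same two steps locally. It is an illustration only, not minikube's implementation (which, as the ssh_runner.go lines show, shells out to openssl and ln over SSH), and the linkCert helper name is invented.

    // hashlink.go - minimal sketch of the /etc/ssl/certs/<subject-hash>.0
    // symlink convention seen in the log above. Illustrative only.
    package main

    import (
    	"fmt"
    	"os"
    	"os/exec"
    	"path/filepath"
    	"strings"
    )

    func linkCert(pemPath string) error {
    	// Equivalent of: openssl x509 -hash -noout -in <pemPath>
    	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pemPath).Output()
    	if err != nil {
    		return fmt.Errorf("hashing %s: %w", pemPath, err)
    	}
    	hash := strings.TrimSpace(string(out))
    	link := filepath.Join("/etc/ssl/certs", hash+".0")
    	// Equivalent of: test -L <link> || ln -fs <pemPath> <link>
    	if _, err := os.Lstat(link); err == nil {
    		return nil // symlink already present
    	}
    	return os.Symlink(pemPath, link)
    }

    func main() {
    	if err := linkCert("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    }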
	I0916 11:15:32.852639 1507580 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:15:32.856179 1507580 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:15:32.856246 1507580 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 11:15:32.856257 1507580 command_runner.go:130] > Device: 10301h/66305d	Inode: 1308755     Links: 1
	I0916 11:15:32.856264 1507580 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:15:32.856270 1507580 command_runner.go:130] > Access: 2024-09-16 11:13:06.590407298 +0000
	I0916 11:15:32.856275 1507580 command_runner.go:130] > Modify: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:15:32.856280 1507580 command_runner.go:130] > Change: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:15:32.856285 1507580 command_runner.go:130] >  Birth: 2024-09-16 11:09:53.807109492 +0000
	I0916 11:15:32.856362 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:15:32.863033 1507580 command_runner.go:130] > Certificate will not expire
	I0916 11:15:32.863441 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:15:32.870214 1507580 command_runner.go:130] > Certificate will not expire
	I0916 11:15:32.870632 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:15:32.877748 1507580 command_runner.go:130] > Certificate will not expire
	I0916 11:15:32.877895 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:15:32.884589 1507580 command_runner.go:130] > Certificate will not expire
	I0916 11:15:32.885057 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:15:32.891702 1507580 command_runner.go:130] > Certificate will not expire
	I0916 11:15:32.892185 1507580 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 11:15:32.899089 1507580 command_runner.go:130] > Certificate will not expire
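The six "-checkend 86400" probes above ask openssl whether each control-plane certificate expires within the next 86400 seconds (24 hours); "Certificate will not expire" means all are good for at least a day. The same check can be done in-process with crypto/x509. The snippet below is a hedged equivalent, not minikube's code; the expiresWithin helper is an invented name.

    // checkend.go - sketch of what `openssl x509 -noout -checkend 86400`
    // verifies: does the certificate expire within the next duration d?
    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    	"time"
    )

    func expiresWithin(pemPath string, d time.Duration) (bool, error) {
    	data, err := os.ReadFile(pemPath)
    	if err != nil {
    		return false, err
    	}
    	block, _ := pem.Decode(data)
    	if block == nil {
    		return false, fmt.Errorf("%s: no PEM block found", pemPath)
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		return false, err
    	}
    	// Expiring "soon" means NotAfter falls before now+d.
    	return time.Now().Add(d).After(cert.NotAfter), nil
    }

    func main() {
    	soon, err := expiresWithin("/var/lib/minikube/certs/apiserver.crt", 86400*time.Second)
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    	if soon {
    		fmt.Println("Certificate will expire")
    	} else {
    		fmt.Println("Certificate will not expire")
    	}
    }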
	I0916 11:15:32.899182 1507580 kubeadm.go:392] StartCluster: {Name:multinode-654612 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:15:32.899323 1507580 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
	I0916 11:15:32.899387 1507580 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:15:32.938034 1507580 cri.go:89] found id: ""
	I0916 11:15:32.938164 1507580 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:15:32.946137 1507580 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 11:15:32.946161 1507580 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 11:15:32.946168 1507580 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 11:15:32.946172 1507580 command_runner.go:130] > member
	I0916 11:15:32.947325 1507580 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 11:15:32.947344 1507580 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 11:15:32.947421 1507580 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 11:15:32.956482 1507580 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 11:15:32.957041 1507580 kubeconfig.go:47] verify endpoint returned: get endpoint: "multinode-654612" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:15:32.957162 1507580 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-1378450/kubeconfig needs updating (will repair): [kubeconfig missing "multinode-654612" cluster setting kubeconfig missing "multinode-654612" context setting]
	I0916 11:15:32.957435 1507580 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:15:32.957882 1507580 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:15:32.958145 1507580 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:15:32.958836 1507580 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 11:15:32.958905 1507580 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:15:32.969159 1507580 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.67.2
	I0916 11:15:32.969244 1507580 kubeadm.go:597] duration metric: took 21.893853ms to restartPrimaryControlPlane
	I0916 11:15:32.969270 1507580 kubeadm.go:394] duration metric: took 70.09311ms to StartCluster
	I0916 11:15:32.969308 1507580 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:15:32.969404 1507580 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:15:32.970106 1507580 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
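kubeconfig.go above detects that the "multinode-654612" cluster and context entries are missing and repairs the kubeconfig under a write lock before proceeding. A minimal sketch of that kind of repair with client-go's clientcmd package follows; the server URL and certificate paths are taken from the kapi.go dump above, but the code itself is illustrative, not minikube's.

    // kubefix.go - illustrative repair of missing cluster/context/user entries
    // in a kubeconfig, in the spirit of the kubeconfig.go lines above.
    package main

    import (
    	"k8s.io/client-go/tools/clientcmd"
    	api "k8s.io/client-go/tools/clientcmd/api"
    )

    func main() {
    	path := clientcmd.RecommendedHomeFile // assumption: default kubeconfig location
    	cfg, err := clientcmd.LoadFromFile(path)
    	if err != nil {
    		panic(err)
    	}
    	name := "multinode-654612"
    	if _, ok := cfg.Clusters[name]; !ok {
    		c := api.NewCluster()
    		c.Server = "https://192.168.67.2:8443"
    		c.CertificateAuthority = "/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt"
    		cfg.Clusters[name] = c
    	}
    	if _, ok := cfg.AuthInfos[name]; !ok {
    		u := api.NewAuthInfo()
    		u.ClientCertificate = "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt"
    		u.ClientKey = "/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key"
    		cfg.AuthInfos[name] = u
    	}
    	if _, ok := cfg.Contexts[name]; !ok {
    		ctx := api.NewContext()
    		ctx.Cluster = name
    		ctx.AuthInfo = name
    		cfg.Contexts[name] = ctx
    	}
    	if err := clientcmd.WriteToFile(*cfg, path); err != nil {
    		panic(err)
    	}
    }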
	I0916 11:15:32.970337 1507580 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:15:32.970601 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:32.970674 1507580 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:15:32.975909 1507580 out.go:177] * Verifying Kubernetes components...
	I0916 11:15:32.975925 1507580 out.go:177] * Enabled addons: 
	I0916 11:15:32.979494 1507580 addons.go:510] duration metric: took 8.809851ms for enable addons: enabled=[]
	I0916 11:15:32.979600 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:33.128842 1507580 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:15:33.161266 1507580 node_ready.go:35] waiting up to 6m0s for node "multinode-654612" to be "Ready" ...
	I0916 11:15:33.161384 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:33.161397 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:33.161407 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:33.161422 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:33.161628 1507580 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 11:15:33.161646 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:33.662400 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:33.662470 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:33.662496 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:33.662517 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.468176 1507580 round_trippers.go:574] Response Status: 200 OK in 3805 milliseconds
	I0916 11:15:37.468198 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.468217 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.468225 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:15:37.468230 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:15:37.468233 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.468236 1507580 round_trippers.go:580]     Audit-Id: 3229c4e1-b843-44ab-b510-2fdeaddccf47
	I0916 11:15:37.468238 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.469451 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:37.470215 1507580 node_ready.go:49] node "multinode-654612" has status "Ready":"True"
	I0916 11:15:37.470228 1507580 node_ready.go:38] duration metric: took 4.308928495s for node "multinode-654612" to be "Ready" ...
	I0916 11:15:37.470239 1507580 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:15:37.470279 1507580 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:15:37.470289 1507580 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
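node_ready.go above polls GET /api/v1/nodes/multinode-654612 until the node reports Ready; note the first poll at 11:15:33 returns an empty status while the apiserver is still coming up, and the retry at 11:15:37 succeeds after roughly 3.8 seconds. Below is a client-go sketch of such a readiness wait, assuming a kubeconfig at the default location; waitNodeReady is an invented name, not minikube's actual code.

    // nodeready.go - illustrative client-go poll for a node's Ready condition.
    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/apimachinery/pkg/util/wait"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func waitNodeReady(cs *kubernetes.Clientset, name string, timeout time.Duration) error {
    	return wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, timeout, true,
    		func(ctx context.Context) (bool, error) {
    			node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
    			if err != nil {
    				return false, nil // transient API errors: keep polling
    			}
    			for _, c := range node.Status.Conditions {
    				if c.Type == corev1.NodeReady {
    					return c.Status == corev1.ConditionTrue, nil
    				}
    			}
    			return false, nil
    		})
    }

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	if err := waitNodeReady(cs, "multinode-654612", 6*time.Minute); err != nil {
    		panic(err)
    	}
    	fmt.Println(`node "multinode-654612" has status "Ready":"True"`)
    }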
	I0916 11:15:37.470348 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:15:37.470353 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.470361 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.470366 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.492441 1507580 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 11:15:37.492464 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.492474 1507580 round_trippers.go:580]     Audit-Id: 999345df-e480-4bdd-929f-0200b47e5cc4
	I0916 11:15:37.492478 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.492483 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.492486 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:15:37.492489 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:15:37.492492 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.496260 1507580 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"976"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 90668 chars]
	I0916 11:15:37.502903 1507580 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.503106 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:15:37.503133 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.503159 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.503177 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.519726 1507580 round_trippers.go:574] Response Status: 200 OK in 16 milliseconds
	I0916 11:15:37.519793 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.519814 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.519835 1507580 round_trippers.go:580]     Audit-Id: f4c83ff4-af14-433d-8e3f-be8c1ed4e86b
	I0916 11:15:37.519866 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.519887 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.519904 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:15:37.519922 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:15:37.520733 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"775","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6813 chars]
	I0916 11:15:37.521460 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:37.521505 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.521528 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.521549 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.539930 1507580 round_trippers.go:574] Response Status: 200 OK in 18 milliseconds
	I0916 11:15:37.540008 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.540033 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.540055 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:15:37.540086 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:15:37.540108 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.540125 1507580 round_trippers.go:580]     Audit-Id: e170549f-81df-4ebf-b8aa-4a10b814589e
	I0916 11:15:37.540143 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.568647 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:37.569169 1507580 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:37.569207 1507580 pod_ready.go:82] duration metric: took 66.207634ms for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.569235 1507580 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.569347 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:15:37.569374 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.569394 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.569410 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.582433 1507580 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:15:37.582501 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.582532 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.582595 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.582621 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.582655 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.582750 1507580 round_trippers.go:580]     Audit-Id: f0478f4a-ebbe-407e-b3b1-9cc96bc185d2
	I0916 11:15:37.582806 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.583051 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"760","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6575 chars]
	I0916 11:15:37.583875 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:37.583959 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.584034 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.584062 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.590169 1507580 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:15:37.590232 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.590254 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.590273 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.590290 1507580 round_trippers.go:580]     Audit-Id: a3e8cd28-11aa-4193-8a35-173f57f17ba7
	I0916 11:15:37.590323 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.590346 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.590363 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.591749 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"662","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:37.592241 1507580 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:37.592278 1507580 pod_ready.go:82] duration metric: took 23.024705ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.592312 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.592407 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:15:37.592432 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.592454 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.592473 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.607513 1507580 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 11:15:37.607581 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.607605 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.607623 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.607654 1507580 round_trippers.go:580]     Audit-Id: 3ff72d52-1399-4a6a-9053-0bfd2413f002
	I0916 11:15:37.607673 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.607691 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.607708 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.609766 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"753","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 9107 chars]
	I0916 11:15:37.610479 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:37.610534 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.610558 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.610575 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.627652 1507580 round_trippers.go:574] Response Status: 200 OK in 17 milliseconds
	I0916 11:15:37.627726 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.627775 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.627810 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.627834 1507580 round_trippers.go:580]     Audit-Id: 488d6d0c-4828-4272-afb6-66fa2ffb5de3
	I0916 11:15:37.627855 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.627873 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.627890 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.629724 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:37.630217 1507580 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:37.630264 1507580 pod_ready.go:82] duration metric: took 37.931595ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.630290 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.630393 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:15:37.630417 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.630439 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.630461 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.641989 1507580 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 11:15:37.642063 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.642086 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.642101 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.642119 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.642150 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.642173 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.642191 1507580 round_trippers.go:580]     Audit-Id: fb04a0e6-795a-4d23-954e-4c4c4e011eb6
	I0916 11:15:37.642416 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"761","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8897 chars]
	I0916 11:15:37.643050 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:37.643103 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.643124 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.643143 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.650288 1507580 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:15:37.650360 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.650383 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.650403 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.650432 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.650458 1507580 round_trippers.go:580]     Audit-Id: 74264991-6929-48f1-98b1-7393987e9b08
	I0916 11:15:37.650476 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.650494 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.650700 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:37.651154 1507580 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:37.651203 1507580 pod_ready.go:82] duration metric: took 20.894312ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.651231 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.651343 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:15:37.651371 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.651392 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.651412 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.674519 1507580 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 11:15:37.674601 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.674641 1507580 round_trippers.go:580]     Audit-Id: a679efb6-2c4f-4c96-b86b-dda57b59d096
	I0916 11:15:37.674679 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.674716 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.674742 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.674765 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.674795 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.675533 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"877","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:15:37.676200 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:15:37.676243 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.676281 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.676299 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.688909 1507580 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 11:15:37.688981 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.689004 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.689027 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.689059 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.689090 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.689110 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.689129 1507580 round_trippers.go:580]     Audit-Id: 653a97f7-1056-43c8-b588-ae97164bb50f
	I0916 11:15:37.689313 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:15:37.689790 1507580 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:37.689840 1507580 pod_ready.go:82] duration metric: took 38.587366ms for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.689867 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:37.871224 1507580 request.go:632] Waited for 181.258874ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:15:37.871345 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:15:37.871390 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:37.871416 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:37.871435 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:37.876733 1507580 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:15:37.876834 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:37.876858 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:37.876875 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:37.876904 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:37 GMT
	I0916 11:15:37.876926 1507580 round_trippers.go:580]     Audit-Id: 8182054d-008f-4ccc-a085-5718a029bb58
	I0916 11:15:37.877006 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:37.877029 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:37.877206 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"984","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6419 chars]
	I0916 11:15:38.070947 1507580 request.go:632] Waited for 193.149108ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.071062 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.071114 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:38.071142 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:38.071162 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:38.078387 1507580 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:15:38.078454 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:38.078486 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:38.078504 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:38.078536 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:38.078560 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:38 GMT
	I0916 11:15:38.078578 1507580 round_trippers.go:580]     Audit-Id: 5a6b8616-754c-4f33-b57b-c225454fdf63
	I0916 11:15:38.078598 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:38.078831 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:38.270823 1507580 request.go:632] Waited for 80.178769ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:15:38.270975 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:15:38.271001 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:38.271026 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:38.271049 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:38.274233 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:38.274301 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:38.274322 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:38 GMT
	I0916 11:15:38.274343 1507580 round_trippers.go:580]     Audit-Id: 3596a990-737d-4b41-8fd9-8cf903444779
	I0916 11:15:38.274377 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:38.274395 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:38.274411 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:38.274430 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:38.275012 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"994","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:15:38.470937 1507580 request.go:632] Waited for 195.334318ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.471062 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.471093 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:38.471119 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:38.471136 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:38.475760 1507580 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:15:38.475826 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:38.475869 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:38.475899 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:38 GMT
	I0916 11:15:38.475920 1507580 round_trippers.go:580]     Audit-Id: 86141d07-d40f-433b-8049-035549fb33fb
	I0916 11:15:38.475937 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:38.475953 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:38.475984 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:38.476524 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:38.691083 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:15:38.691149 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:38.691173 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:38.691192 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:38.696220 1507580 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:15:38.696247 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:38.696256 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:38.696261 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:38.696266 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:38 GMT
	I0916 11:15:38.696275 1507580 round_trippers.go:580]     Audit-Id: 0794f2f9-9ede-4e9c-a793-b12c040f1ff5
	I0916 11:15:38.696280 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:38.696285 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:38.696913 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"994","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
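[editor's note] Each GET / "Request Headers:" / "Response Headers:" group above is emitted by client-go's round-tripper logging (round_trippers.go) at high verbosity; the pattern is just an http.RoundTripper wrapper. Below is a self-contained sketch in plain net/http, not the actual client-go implementation. Incidentally, the User-Agent ends in kubernetes/$Format because this build did not stamp a git commit via ldflags, so the placeholder survives into the header.

    package main

    import (
        "fmt"
        "net/http"
    )

    // loggingTransport wraps another RoundTripper and prints request and
    // response metadata, similar in spirit to client-go's round_trippers.go.
    type loggingTransport struct {
        next http.RoundTripper
    }

    func (t loggingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
        fmt.Printf("%s %s\n", req.Method, req.URL)
        for k, v := range req.Header {
            fmt.Printf("    %s: %v\n", k, v)
        }
        resp, err := t.next.RoundTrip(req)
        if err != nil {
            return nil, err
        }
        fmt.Printf("Response Status: %s\n", resp.Status)
        for k, v := range resp.Header {
            fmt.Printf("    %s: %v\n", k, v)
        }
        return resp, nil
    }

    func main() {
        client := &http.Client{Transport: loggingTransport{next: http.DefaultTransport}}
        // Any reachable URL works; example.com is purely illustrative.
        if _, err := client.Get("https://example.com"); err != nil {
            fmt.Println("request failed:", err)
        }
    }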
	I0916 11:15:38.870802 1507580 request.go:632] Waited for 173.267381ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.870868 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:38.870877 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:38.870886 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:38.870891 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:38.875309 1507580 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:15:38.875422 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:38.875454 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:38.875467 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:38.875470 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:38.875473 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:38.875485 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:38 GMT
	I0916 11:15:38.875488 1507580 round_trippers.go:580]     Audit-Id: 853502ca-2f9c-4026-818b-73ac4bc55be2
	I0916 11:15:38.875645 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:38.876087 1507580 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:38.876105 1507580 pod_ready.go:82] duration metric: took 1.186218839s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:38.876116 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
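[editor's note] pod_ready.go:93 and :82 above conclude that kube-proxy-t9pzq became Ready after 1.186218839s by inspecting the PodReady condition carried in the JSON bodies. A minimal sketch of that check against the typed API objects (the helper name isPodReady is ours, not minikube's):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    // isPodReady reports whether the PodReady condition is True,
    // mirroring what pod_ready.go checks before logging Ready:"True".
    func isPodReady(pod *corev1.Pod) bool {
        for _, cond := range pod.Status.Conditions {
            if cond.Type == corev1.PodReady {
                return cond.Status == corev1.ConditionTrue
            }
        }
        return false
    }

    func main() {
        pod := &corev1.Pod{
            Status: corev1.PodStatus{
                Conditions: []corev1.PodCondition{
                    {Type: corev1.PodReady, Status: corev1.ConditionTrue},
                },
            },
        }
        fmt.Println(isPodReady(pod)) // true
    }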
	I0916 11:15:39.070546 1507580 request.go:632] Waited for 194.357497ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:15:39.070636 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:15:39.070646 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:39.070654 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:39.070663 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:39.073190 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:39.073217 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:39.073227 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:39.073232 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:39.073236 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:39 GMT
	I0916 11:15:39.073240 1507580 round_trippers.go:580]     Audit-Id: c5d0c274-2f86-449c-8da8-78440dd6a564
	I0916 11:15:39.073242 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:39.073245 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:39.073626 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vf648","generateName":"kube-proxy-","namespace":"kube-system","uid":"376afe3e-390b-443b-b289-7dfeeb1deed1","resourceVersion":"947","creationTimestamp":"2024-09-16T11:11:48Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:48Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6178 chars]
	I0916 11:15:39.270397 1507580 request.go:632] Waited for 196.250562ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:15:39.270467 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m03
	I0916 11:15:39.270472 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:39.270481 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:39.270493 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:39.272892 1507580 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:15:39.272922 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:39.272930 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:39.272936 1507580 round_trippers.go:580]     Content-Length: 210
	I0916 11:15:39.272939 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:39 GMT
	I0916 11:15:39.272959 1507580 round_trippers.go:580]     Audit-Id: b41cef94-e211-4d80-95df-663f961f25b6
	I0916 11:15:39.272963 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:39.272965 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:39.272968 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:39.272987 1507580 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"nodes \"multinode-654612-m03\" not found","reason":"NotFound","details":{"name":"multinode-654612-m03","kind":"nodes"},"code":404}
	I0916 11:15:39.273199 1507580 pod_ready.go:98] node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-654612-m03": nodes "multinode-654612-m03" not found
	I0916 11:15:39.273220 1507580 pod_ready.go:82] duration metric: took 397.092863ms for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	E0916 11:15:39.273230 1507580 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-654612-m03" hosting pod "kube-proxy-vf648" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-654612-m03": nodes "multinode-654612-m03" not found
	I0916 11:15:39.273240 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
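[editor's note] The 404 above is a typed Status object ("nodes \"multinode-654612-m03\" not found"): the third node was removed earlier in the multinode flow, so pod_ready.go records the wait for its kube-proxy pod as skipped rather than failed. Client code usually makes that distinction with apierrors.IsNotFound; a runnable sketch using a fake clientset (the nodeGone helper is illustrative, not minikube's code):

    package main

    import (
        "context"
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/kubernetes/fake"
    )

    // nodeGone reports whether the node no longer exists, which the wait
    // loop above treats as "skip this pod" rather than a hard failure.
    func nodeGone(ctx context.Context, cs kubernetes.Interface, name string) (bool, error) {
        _, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
        if apierrors.IsNotFound(err) {
            return true, nil // e.g. multinode-654612-m03 after deletion
        }
        return false, err // nil if the node exists, a real error otherwise
    }

    func main() {
        cs := fake.NewSimpleClientset() // empty fake cluster: every node is "gone"
        gone, err := nodeGone(context.Background(), cs, "multinode-654612-m03")
        fmt.Println(gone, err) // true <nil>
    }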
	I0916 11:15:39.470675 1507580 request.go:632] Waited for 197.313181ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:39.470743 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:39.470749 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:39.470758 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:39.470762 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:39.473704 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:39.473729 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:39.473738 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:39.473742 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:39.473745 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:39.473748 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:39 GMT
	I0916 11:15:39.473750 1507580 round_trippers.go:580]     Audit-Id: 223a3308-c3ac-49b4-ac84-043f0d87b710
	I0916 11:15:39.473753 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:39.474042 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:39.670949 1507580 request.go:632] Waited for 196.38093ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:39.671037 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:39.671047 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:39.671056 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:39.671068 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:39.673858 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:39.673890 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:39.673900 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:39.673915 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:39.673919 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:39.673922 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:39.673926 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:39 GMT
	I0916 11:15:39.673929 1507580 round_trippers.go:580]     Audit-Id: 8f6be293-f779-4f2c-bcf1-c8e85ba2d9cf
	I0916 11:15:39.674329 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:39.870681 1507580 request.go:632] Waited for 97.180344ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:39.870805 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:39.870840 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:39.870886 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:39.870953 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:39.875365 1507580 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:15:39.875455 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:39.875535 1507580 round_trippers.go:580]     Audit-Id: 355a1ac3-9b7a-4941-b52e-4acb64e40772
	I0916 11:15:39.875556 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:39.875575 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:39.875592 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:39.875619 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:39.875642 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:39 GMT
	I0916 11:15:39.875861 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:40.071048 1507580 request.go:632] Waited for 194.607566ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.071132 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.071143 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:40.071154 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:40.071163 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:40.074530 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:40.074557 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:40.074566 1507580 round_trippers.go:580]     Audit-Id: 7b5ea8b2-27dd-4e33-8123-10b5930f5bae
	I0916 11:15:40.074571 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:40.074573 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:40.074576 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:40.074579 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:40.074581 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:40 GMT
	I0916 11:15:40.074857 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:40.273747 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:40.273775 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:40.273784 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:40.273789 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:40.276336 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:40.276361 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:40.276370 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:40.276375 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:40.276378 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:40.276381 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:40.276384 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:40 GMT
	I0916 11:15:40.276386 1507580 round_trippers.go:580]     Audit-Id: d1f94ceb-4697-4b7d-a4e9-b1698a9961db
	I0916 11:15:40.277076 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:40.470930 1507580 request.go:632] Waited for 193.341989ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.470991 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.470998 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:40.471006 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:40.471015 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:40.474625 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:40.474698 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:40.474721 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:40 GMT
	I0916 11:15:40.474740 1507580 round_trippers.go:580]     Audit-Id: 4437121b-e7ac-4577-b8b1-258c334d6615
	I0916 11:15:40.474775 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:40.474793 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:40.474819 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:40.474828 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:40.475020 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:40.774004 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:40.774044 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:40.774054 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:40.774059 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:40.776597 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:40.776636 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:40.776646 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:40.776651 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:40.776656 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:40 GMT
	I0916 11:15:40.776659 1507580 round_trippers.go:580]     Audit-Id: 5dcc3517-e8a8-4cf0-aea0-27056768b15f
	I0916 11:15:40.776661 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:40.776664 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:40.777104 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:40.870782 1507580 request.go:632] Waited for 93.224897ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.870863 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:40.870872 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:40.870881 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:40.870887 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:40.873089 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:40.873113 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:40.873121 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:40.873127 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:40 GMT
	I0916 11:15:40.873130 1507580 round_trippers.go:580]     Audit-Id: ab5eec45-ec34-4eb9-9f19-4d789bf4c5b7
	I0916 11:15:40.873133 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:40.873137 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:40.873179 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:40.874533 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:41.273499 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:41.273526 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:41.273535 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:41.273540 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:41.276524 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:41.276592 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:41.276624 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:41 GMT
	I0916 11:15:41.276643 1507580 round_trippers.go:580]     Audit-Id: 09300ce2-e393-4896-93d9-0ba83e663649
	I0916 11:15:41.276662 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:41.276708 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:41.276726 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:41.276743 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:41.276906 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:41.277396 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:41.277414 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:41.277423 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:41.277427 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:41.279723 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:41.279785 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:41.279807 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:41.279825 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:41.279858 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:41.279868 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:41.279880 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:41 GMT
	I0916 11:15:41.279884 1507580 round_trippers.go:580]     Audit-Id: ae72665b-b6ce-4aa4-8ff3-fa19b15c4353
	I0916 11:15:41.280052 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:41.280500 1507580 pod_ready.go:103] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"False"
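[editor's note] From pod_ready.go:103 onward the log settles into a steady poll: the scheduler pod and its node are re-fetched roughly every 500ms (11:15:41.27, 41.77, 42.27, ...) until the Ready condition flips or the 6m0s budget from pod_ready.go:79 runs out. A minimal sketch of that loop shape using apimachinery's wait helpers; the 500ms interval is read off the timestamps above, not a documented constant, and the readiness check itself is stubbed out:

    package main

    import (
        "context"
        "fmt"
        "time"

        "k8s.io/apimachinery/pkg/util/wait"
    )

    func main() {
        attempts := 0
        // Poll every 500ms, up to 6 minutes, matching the cadence and
        // budget visible in the log above.
        err := wait.PollUntilContextTimeout(context.Background(),
            500*time.Millisecond, 6*time.Minute, true,
            func(ctx context.Context) (bool, error) {
                attempts++
                // Stand-in for "GET the pod and inspect its Ready
                // condition"; returning true ends the poll.
                return attempts >= 3, nil
            })
        fmt.Println(attempts, err) // 3 <nil>
    }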
	I0916 11:15:41.773508 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:41.773602 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:41.773616 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:41.773620 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:41.776339 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:41.776408 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:41.776433 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:41.776453 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:41.776486 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:41.776507 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:41 GMT
	I0916 11:15:41.776512 1507580 round_trippers.go:580]     Audit-Id: 4a28c020-7313-45c8-8fa8-5a9bdc4cf75c
	I0916 11:15:41.776515 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:41.776716 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:41.777204 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:41.777221 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:41.777230 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:41.777235 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:41.779230 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:15:41.779253 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:41.779261 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:41 GMT
	I0916 11:15:41.779266 1507580 round_trippers.go:580]     Audit-Id: 278fae5f-e194-45f3-924a-e5592f865e96
	I0916 11:15:41.779270 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:41.779273 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:41.779275 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:41.779278 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:41.779693 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:42.274038 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:42.274068 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:42.274076 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:42.274082 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:42.276719 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:42.276746 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:42.276755 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:42 GMT
	I0916 11:15:42.276762 1507580 round_trippers.go:580]     Audit-Id: dcc45e8c-dde2-4004-bc32-7851be142deb
	I0916 11:15:42.276767 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:42.276771 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:42.276774 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:42.276777 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:42.277123 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:42.277615 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:42.277637 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:42.277647 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:42.277653 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:42.279944 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:42.280008 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:42.280021 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:42 GMT
	I0916 11:15:42.280026 1507580 round_trippers.go:580]     Audit-Id: b604dacd-83db-43cb-969c-c35c44f2386b
	I0916 11:15:42.280033 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:42.280038 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:42.280041 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:42.280045 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:42.280390 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:42.774107 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:42.774137 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:42.774147 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:42.774151 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:42.776397 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:42.776425 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:42.776433 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:42.776437 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:42.776453 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:42 GMT
	I0916 11:15:42.776457 1507580 round_trippers.go:580]     Audit-Id: 9b903f15-09a0-4e03-b424-f12a976e6c97
	I0916 11:15:42.776459 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:42.776462 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:42.776807 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:42.777273 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:42.777291 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:42.777300 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:42.777304 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:42.779291 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:15:42.779313 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:42.779322 1507580 round_trippers.go:580]     Audit-Id: 06f5b821-fe42-409a-bea9-003416b08780
	I0916 11:15:42.779326 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:42.779335 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:42.779342 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:42.779346 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:42.779349 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:42 GMT
	I0916 11:15:42.779705 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:43.273988 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:43.274016 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:43.274026 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:43.274030 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:43.276335 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:43.276357 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:43.276365 1507580 round_trippers.go:580]     Audit-Id: 2665e785-40c3-4a4f-9fcb-1a7984459065
	I0916 11:15:43.276369 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:43.276375 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:43.276379 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:43.276382 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:43.276385 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:43 GMT
	I0916 11:15:43.276570 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:43.277077 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:43.277089 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:43.277098 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:43.277102 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:43.279181 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:43.279199 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:43.279209 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:43.279215 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:43.279219 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:43.279221 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:43 GMT
	I0916 11:15:43.279224 1507580 round_trippers.go:580]     Audit-Id: 4077a4cb-3c74-420d-9424-e0eeb4a6982d
	I0916 11:15:43.279226 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:43.279695 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:43.774380 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:43.774413 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:43.774423 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:43.774427 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:43.777234 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:43.777264 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:43.777272 1507580 round_trippers.go:580]     Audit-Id: 37cd32e9-1258-4855-bd36-d712add37f2b
	I0916 11:15:43.777276 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:43.777279 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:43.777286 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:43.777294 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:43.777298 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:43 GMT
	I0916 11:15:43.777434 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:43.777964 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:43.777981 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:43.777991 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:43.778002 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:43.780239 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:43.780259 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:43.780266 1507580 round_trippers.go:580]     Audit-Id: 89615a42-30e4-4441-bf0f-c088ef80e368
	I0916 11:15:43.780271 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:43.780276 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:43.780279 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:43.780282 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:43.780285 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:43 GMT
	I0916 11:15:43.780444 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:43.780880 1507580 pod_ready.go:103] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"False"
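The pod_ready.go line above closes one iteration of the wait loop driving all of these requests: fetch the pod, inspect its Ready condition, log the result, fetch the node for context, sleep, and retry. A minimal sketch of that pattern using client-go follows; the helper names and kubeconfig path are illustrative, and this is not minikube's actual pod_ready implementation.

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// isPodReady reports whether the pod's PodReady condition is True; this is
// the condition the "Ready":"False" log lines above are tracking.
func isPodReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

// waitForPodReady polls the pod on a fixed interval (the log shows a ~500ms
// cadence) until it becomes Ready or the timeout expires.
func waitForPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err == nil && isPodReady(pod) {
			return nil
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("pod %s/%s not Ready after %v", ns, name, timeout)
		}
		time.Sleep(500 * time.Millisecond)
	}
}

func main() {
	// "/path/to/kubeconfig" is a placeholder for the minikube profile's kubeconfig.
	config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	fmt.Println(waitForPodReady(context.Background(), cs,
		"kube-system", "kube-scheduler-multinode-654612", 2*time.Minute))
}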
	I0916 11:15:44.273967 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:44.273995 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:44.274005 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:44.274011 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:44.276381 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:44.276401 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:44.276409 1507580 round_trippers.go:580]     Audit-Id: c3c87206-95d2-41bd-9738-37d45d67de83
	I0916 11:15:44.276413 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:44.276418 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:44.276421 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:44.276424 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:44.276427 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:44 GMT
	I0916 11:15:44.276565 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:44.277099 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:44.277113 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:44.277127 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:44.277133 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:44.279299 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:44.279320 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:44.279328 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:44.279333 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:44.279337 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:44 GMT
	I0916 11:15:44.279340 1507580 round_trippers.go:580]     Audit-Id: 45af43b7-b904-4e04-b150-52314f6fd409
	I0916 11:15:44.279358 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:44.279368 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:44.279626 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:44.773475 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:44.773505 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:44.773515 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:44.773519 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:44.775778 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:44.775804 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:44.775811 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:44 GMT
	I0916 11:15:44.775815 1507580 round_trippers.go:580]     Audit-Id: bea8608b-5fa1-4def-a2ff-7671777beb71
	I0916 11:15:44.775844 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:44.775847 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:44.775850 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:44.775853 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:44.775964 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:44.776452 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:44.776470 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:44.776479 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:44.776483 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:44.778745 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:44.778775 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:44.778784 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:44 GMT
	I0916 11:15:44.778788 1507580 round_trippers.go:580]     Audit-Id: 8cfac063-5798-4599-8724-919083aabf35
	I0916 11:15:44.778791 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:44.778794 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:44.778797 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:44.778799 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:44.778917 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:45.273957 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:45.274021 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:45.274033 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:45.274039 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:45.276806 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:45.276837 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:45.276848 1507580 round_trippers.go:580]     Audit-Id: d682a3fe-6e13-4436-89b8-6d04d126371c
	I0916 11:15:45.276854 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:45.276859 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:45.276884 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:45.276891 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:45.276898 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:45 GMT
	I0916 11:15:45.277529 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:45.278087 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:45.278109 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:45.278121 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:45.278125 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:45.280670 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:45.280737 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:45.280749 1507580 round_trippers.go:580]     Audit-Id: 91846a7a-8bd6-456d-b8db-eefd90084bea
	I0916 11:15:45.280756 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:45.280761 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:45.280768 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:45.280773 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:45.280777 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:45 GMT
	I0916 11:15:45.281064 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:45.773918 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:45.773944 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:45.773955 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:45.773961 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:45.776330 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:45.776400 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:45.776423 1507580 round_trippers.go:580]     Audit-Id: c95f060c-85c3-478a-b42a-afb3f18f62d4
	I0916 11:15:45.776445 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:45.776479 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:45.776514 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:45.776532 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:45.776549 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:45 GMT
	I0916 11:15:45.776771 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:45.777237 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:45.777256 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:45.777266 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:45.777270 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:45.779339 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:45.779367 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:45.779376 1507580 round_trippers.go:580]     Audit-Id: 6115e1ad-f0ab-4e9a-9703-ef810f2e4ba3
	I0916 11:15:45.779381 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:45.779385 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:45.779388 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:45.779393 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:45.779396 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:45 GMT
	I0916 11:15:45.779598 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:46.273703 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:46.273730 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:46.273740 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:46.273751 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:46.276108 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:46.276133 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:46.276142 1507580 round_trippers.go:580]     Audit-Id: 0f0e2bce-a1e9-4035-ad2e-87bb73333e47
	I0916 11:15:46.276147 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:46.276151 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:46.276154 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:46.276157 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:46.276159 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:46 GMT
	I0916 11:15:46.276327 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:46.276847 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:46.276864 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:46.276873 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:46.276878 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:46.279154 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:46.279224 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:46.279258 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:46.279269 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:46.279275 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:46.279296 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:46.279302 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:46 GMT
	I0916 11:15:46.279305 1507580 round_trippers.go:580]     Audit-Id: 9543b450-aec6-4178-8740-8949dfcf49ea
	I0916 11:15:46.279468 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:46.279964 1507580 pod_ready.go:103] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:15:46.773885 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:46.773912 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:46.773921 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:46.773927 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:46.776288 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:46.776314 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:46.776323 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:46.776328 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:46.776332 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:46.776335 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:46 GMT
	I0916 11:15:46.776337 1507580 round_trippers.go:580]     Audit-Id: cf51304f-b1b9-4ff8-87f8-fc818b9a6a05
	I0916 11:15:46.776340 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:46.776483 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:46.777065 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:46.777086 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:46.777095 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:46.777100 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:46.779131 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:46.779192 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:46.779215 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:46.779235 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:46 GMT
	I0916 11:15:46.779264 1507580 round_trippers.go:580]     Audit-Id: 02ccf6a7-c307-47c9-8c17-b790d5838b60
	I0916 11:15:46.779283 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:46.779299 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:46.779317 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:46.779461 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:47.273948 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:47.273975 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:47.273985 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:47.273992 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:47.276704 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:47.276776 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:47.276795 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:47.276806 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:47.276809 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:47.276813 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:47.276817 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:47 GMT
	I0916 11:15:47.276821 1507580 round_trippers.go:580]     Audit-Id: 1b8877dd-d1e6-4207-addc-dd1b153a8350
	I0916 11:15:47.277061 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:47.277654 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:47.277681 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:47.277690 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:47.277694 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:47.279968 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:47.280053 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:47.280061 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:47.280065 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:47.280068 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:47.280087 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:47 GMT
	I0916 11:15:47.280098 1507580 round_trippers.go:580]     Audit-Id: 2c000ccc-b897-4dfa-8543-0a19c910754a
	I0916 11:15:47.280101 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:47.280257 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:47.773454 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:47.773489 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:47.773498 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:47.773503 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:47.776290 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:47.776332 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:47.776535 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:47.776540 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:47.776550 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:47.776554 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:47 GMT
	I0916 11:15:47.776558 1507580 round_trippers.go:580]     Audit-Id: 00dc611d-598c-4254-a1eb-0600838f6506
	I0916 11:15:47.776567 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:47.776788 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:47.777425 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:47.777448 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:47.777458 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:47.777467 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:47.780353 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:47.780424 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:47.780449 1507580 round_trippers.go:580]     Audit-Id: ee25b637-38f6-4cb9-89ac-0b5bc0d848d5
	I0916 11:15:47.780470 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:47.780528 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:47.780541 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:47.780544 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:47.780547 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:47 GMT
	I0916 11:15:47.780669 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:48.273909 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:48.273936 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:48.273951 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:48.273956 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:48.276715 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:48.276737 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:48.276745 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:48.276751 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:48.276754 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:48 GMT
	I0916 11:15:48.276758 1507580 round_trippers.go:580]     Audit-Id: 92a6bf9e-c5fc-4ce3-85d5-4bfbf83f5ef0
	I0916 11:15:48.276761 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:48.276763 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:48.277012 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:48.277505 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:48.277524 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:48.277532 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:48.277536 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:48.279644 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:48.279666 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:48.279674 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:48.279678 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:48.279681 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:48.279688 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:48.279691 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:48 GMT
	I0916 11:15:48.279695 1507580 round_trippers.go:580]     Audit-Id: 691d849d-5c10-42a9-a1ed-365bcd3caa98
	I0916 11:15:48.279818 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:48.280251 1507580 pod_ready.go:103] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"False"
	I0916 11:15:48.773912 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:48.773939 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:48.773948 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:48.773954 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:48.776344 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:48.776424 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:48.776493 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:48 GMT
	I0916 11:15:48.776523 1507580 round_trippers.go:580]     Audit-Id: cb59fda5-48dc-4884-95b4-3f3d60825133
	I0916 11:15:48.776535 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:48.776539 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:48.776542 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:48.776545 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:48.776707 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:48.777223 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:48.777241 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:48.777251 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:48.777255 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:48.779972 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:48.780000 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:48.780017 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:48.780022 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:48 GMT
	I0916 11:15:48.780025 1507580 round_trippers.go:580]     Audit-Id: db00c5b4-8458-4d22-adb2-b47772f8202e
	I0916 11:15:48.780027 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:48.780030 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:48.780032 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:48.780157 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:49.273923 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:49.273951 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:49.273961 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:49.273966 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:49.276220 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:49.276279 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:49.276288 1507580 round_trippers.go:580]     Audit-Id: c24b610e-e874-4294-83de-c0072adba56a
	I0916 11:15:49.276293 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:49.276298 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:49.276301 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:49.276303 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:49.276306 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:49 GMT
	I0916 11:15:49.276420 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:49.276928 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:49.276946 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:49.276955 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:49.276960 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:49.278979 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:49.279004 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:49.279012 1507580 round_trippers.go:580]     Audit-Id: d8ebbe5f-f580-41cd-95e6-2b5cfe364d6b
	I0916 11:15:49.279016 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:49.279019 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:49.279022 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:49.279027 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:49.279032 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:49 GMT
	I0916 11:15:49.279277 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:49.774436 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:49.774465 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:49.774476 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:49.774482 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:49.776953 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:49.776984 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:49.776993 1507580 round_trippers.go:580]     Audit-Id: aeda5cf8-1d91-4267-8dd4-17ba680b788f
	I0916 11:15:49.777000 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:49.777004 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:49.777007 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:49.777009 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:49.777014 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:49 GMT
	I0916 11:15:49.777187 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:49.777655 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:49.777677 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:49.777686 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:49.777693 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:49.780127 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:49.780156 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:49.780165 1507580 round_trippers.go:580]     Audit-Id: f13ca61b-d16f-42bd-a15f-739879239090
	I0916 11:15:49.780169 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:49.780172 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:49.780175 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:49.780178 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:49.780181 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:49 GMT
	I0916 11:15:49.780464 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:50.273948 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:50.273975 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.273984 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.273990 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.276419 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.276447 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.276456 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.276462 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.276465 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.276468 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.276471 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.276474 1507580 round_trippers.go:580]     Audit-Id: 852a33fc-c869-457f-8ccb-b193e8f660c5
	I0916 11:15:50.276662 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"988","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5345 chars]
	I0916 11:15:50.277147 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:50.277166 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.277175 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.277180 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.279501 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.279521 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.279529 1507580 round_trippers.go:580]     Audit-Id: 00956b46-2be8-4cc4-b2dc-f2a4ff93e4bc
	I0916 11:15:50.279535 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.279539 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.279542 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.279545 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.279547 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.279730 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:50.773852 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:15:50.773876 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.773886 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.773890 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.776084 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.776116 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.776124 1507580 round_trippers.go:580]     Audit-Id: 1041290f-c984-478a-94c4-57016820e3c9
	I0916 11:15:50.776127 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.776130 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.776133 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.776136 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.776139 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.776368 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"1072","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{ [truncated 5102 chars]
	I0916 11:15:50.776841 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:50.776859 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.776867 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.776871 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.778937 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.778956 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.778964 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.778968 1507580 round_trippers.go:580]     Audit-Id: 9ff9280d-cd48-4c23-9018-19944c68a054
	I0916 11:15:50.778971 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.778982 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.778985 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.778988 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.779146 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:50.779585 1507580 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:15:50.779604 1507580 pod_ready.go:82] duration metric: took 11.506354968s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:50.779616 1507580 pod_ready.go:39] duration metric: took 13.309365582s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
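The repeated GET/response pairs above are minikube's readiness poll: roughly every 500ms it re-fetches the kube-scheduler pod (and the node) until the pod's Ready condition turns True. A minimal sketch of that loop follows; it assumes anonymous access to the apiserver, whereas the real run authenticates with client certificates, so the InsecureSkipVerify below exists only to keep the sketch self-contained.

// readiness_poll.go — hedged sketch of the polling loop in the log above.
package main

import (
	"crypto/tls"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// pod decodes only the fields the check needs from the Pod JSON.
type pod struct {
	Status struct {
		Conditions []struct {
			Type   string `json:"type"`
			Status string `json:"status"`
		} `json:"conditions"`
	} `json:"status"`
}

func main() {
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // sketch only
	}}
	url := "https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612"
	for attempt := 0; attempt < 240; attempt++ { // ~2 minutes at 500ms
		resp, err := client.Get(url)
		if err == nil {
			var p pod
			err = json.NewDecoder(resp.Body).Decode(&p)
			resp.Body.Close()
			if err == nil {
				for _, c := range p.Status.Conditions {
					if c.Type == "Ready" && c.Status == "True" {
						fmt.Println("kube-scheduler is Ready")
						return
					}
				}
			}
		}
		time.Sleep(500 * time.Millisecond) // matches the ~500ms cadence above
	}
	fmt.Println("timed out waiting for Ready")
}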
	I0916 11:15:50.779631 1507580 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:15:50.779701 1507580 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:15:50.789943 1507580 command_runner.go:130] > 914
	I0916 11:15:50.791255 1507580 api_server.go:72] duration metric: took 17.820879073s to wait for apiserver process to appear ...
	I0916 11:15:50.791277 1507580 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:15:50.791299 1507580 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0916 11:15:50.799817 1507580 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
	I0916 11:15:50.799893 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/version
	I0916 11:15:50.799904 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.799913 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.799918 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.801085 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:15:50.801108 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.801116 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.801120 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.801123 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.801125 1507580 round_trippers.go:580]     Content-Length: 263
	I0916 11:15:50.801128 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.801133 1507580 round_trippers.go:580]     Audit-Id: 69f9a8cb-f020-4dd9-820b-1c54c8ba2d71
	I0916 11:15:50.801136 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.801154 1507580 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:15:50.801252 1507580 api_server.go:141] control plane version: v1.31.1
	I0916 11:15:50.801270 1507580 api_server.go:131] duration metric: took 9.986831ms to wait for apiserver health ...
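Once the pods are Ready, the probe above treats /healthz as healthy when it returns HTTP 200 with the literal body "ok", then decodes /version into the small JSON document printed in the log. A sketch of both checks, under the same standalone-TLS assumption as the previous sketch:

// health_and_version.go — hedged sketch of the /healthz and /version checks.
package main

import (
	"crypto/tls"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // sketch only
	}}

	// healthz: healthy means 200 plus the literal body "ok".
	resp, err := client.Get("https://192.168.67.2:8443/healthz")
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Printf("healthz: %d %s\n", resp.StatusCode, body)

	// version: decoded from the JSON document shown verbatim in the log.
	resp, err = client.Get("https://192.168.67.2:8443/version")
	if err != nil {
		panic(err)
	}
	var v struct {
		GitVersion string `json:"gitVersion"`
		Platform   string `json:"platform"`
	}
	json.NewDecoder(resp.Body).Decode(&v)
	resp.Body.Close()
	fmt.Println("control plane:", v.GitVersion, "on", v.Platform) // v1.31.1 on linux/arm64
}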
	I0916 11:15:50.801279 1507580 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:15:50.801339 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:15:50.801349 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.801356 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.801361 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.805101 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:50.805130 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.805139 1507580 round_trippers.go:580]     Audit-Id: 5cc80ed8-dc7e-43ba-ae1a-6c87c068a51d
	I0916 11:15:50.805144 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.805150 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.805153 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.805156 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.805160 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.806167 1507580 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1072"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers": [truncated 90698 chars]
	I0916 11:15:50.810254 1507580 system_pods.go:59] 12 kube-system pods found
	I0916 11:15:50.810303 1507580 system_pods.go:61] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:15:50.810311 1507580 system_pods.go:61] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:15:50.810317 1507580 system_pods.go:61] "kindnet-687xg" [021cf850-fa0b-463e-968b-f257f7952a05] Running
	I0916 11:15:50.810321 1507580 system_pods.go:61] "kindnet-ncfhl" [2e9059ba-ed83-45b3-810c-02dda1910d4a] Running
	I0916 11:15:50.810325 1507580 system_pods.go:61] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:15:50.810330 1507580 system_pods.go:61] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:15:50.810337 1507580 system_pods.go:61] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:15:50.810341 1507580 system_pods.go:61] "kube-proxy-gf2tw" [814e8a89-b190-4aef-a303-44981c9e19c9] Running
	I0916 11:15:50.810349 1507580 system_pods.go:61] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:15:50.810355 1507580 system_pods.go:61] "kube-proxy-vf648" [376afe3e-390b-443b-b289-7dfeeb1deed1] Running
	I0916 11:15:50.810362 1507580 system_pods.go:61] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:15:50.810366 1507580 system_pods.go:61] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:15:50.810374 1507580 system_pods.go:74] duration metric: took 9.090148ms to wait for pod list to return data ...
	I0916 11:15:50.810383 1507580 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:15:50.810478 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:15:50.810490 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.810498 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.810503 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.813304 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.813328 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.813335 1507580 round_trippers.go:580]     Audit-Id: ead5eee1-19de-400f-b63c-369dbb6748c7
	I0916 11:15:50.813341 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.813346 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.813349 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.813353 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.813356 1507580 round_trippers.go:580]     Content-Length: 262
	I0916 11:15:50.813358 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.813379 1507580 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"1072"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"8b0a4fd5-1ca6-4da1-beae-b1e2017b49fd","resourceVersion":"297","creationTimestamp":"2024-09-16T11:10:14Z"}}]}
	I0916 11:15:50.813562 1507580 default_sa.go:45] found service account: "default"
	I0916 11:15:50.813581 1507580 default_sa.go:55] duration metric: took 3.189689ms for default service account to be created ...
	I0916 11:15:50.813590 1507580 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:15:50.813656 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:15:50.813666 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.813675 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.813680 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.816851 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:50.816923 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.816945 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.816958 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.816962 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.816966 1507580 round_trippers.go:580]     Audit-Id: 3cb1c4a2-98c4-455c-9fef-cdfe0464fb75
	I0916 11:15:50.816974 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.816977 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.818252 1507580 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1072"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers": [truncated 90698 chars]
	I0916 11:15:50.822363 1507580 system_pods.go:86] 12 kube-system pods found
	I0916 11:15:50.822404 1507580 system_pods.go:89] "coredns-7c65d6cfc9-szvv9" [26df8cd4-36bc-49e1-98bf-9c30f5555b7b] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:15:50.822412 1507580 system_pods.go:89] "etcd-multinode-654612" [bb46feea-e4d5-411b-9ebc-e5984b1147a8] Running
	I0916 11:15:50.822418 1507580 system_pods.go:89] "kindnet-687xg" [021cf850-fa0b-463e-968b-f257f7952a05] Running
	I0916 11:15:50.822422 1507580 system_pods.go:89] "kindnet-ncfhl" [2e9059ba-ed83-45b3-810c-02dda1910d4a] Running
	I0916 11:15:50.822427 1507580 system_pods.go:89] "kindnet-whjqt" [0ed90b6c-0a03-4af6-a0ab-ea90794fa963] Running
	I0916 11:15:50.822432 1507580 system_pods.go:89] "kube-apiserver-multinode-654612" [8a56377d-b2a9-46dc-90b0-6d8f0aadec52] Running
	I0916 11:15:50.822438 1507580 system_pods.go:89] "kube-controller-manager-multinode-654612" [08e87c01-201e-4373-bbd7-0a8a7a724a84] Running
	I0916 11:15:50.822442 1507580 system_pods.go:89] "kube-proxy-gf2tw" [814e8a89-b190-4aef-a303-44981c9e19c9] Running
	I0916 11:15:50.822447 1507580 system_pods.go:89] "kube-proxy-t9pzq" [d5dac41c-8386-4ad5-a463-1730169d8062] Running
	I0916 11:15:50.822450 1507580 system_pods.go:89] "kube-proxy-vf648" [376afe3e-390b-443b-b289-7dfeeb1deed1] Running
	I0916 11:15:50.822455 1507580 system_pods.go:89] "kube-scheduler-multinode-654612" [fd553108-8193-4f33-8190-d4ec25a66de1] Running
	I0916 11:15:50.822459 1507580 system_pods.go:89] "storage-provisioner" [2b21455e-8cb4-4c70-937b-6ff3cd85b42f] Running
	I0916 11:15:50.822466 1507580 system_pods.go:126] duration metric: took 8.866505ms to wait for k8s-apps to be running ...
	I0916 11:15:50.822481 1507580 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:15:50.822545 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:15:50.834857 1507580 system_svc.go:56] duration metric: took 12.365964ms WaitForService to wait for kubelet
	I0916 11:15:50.834939 1507580 kubeadm.go:582] duration metric: took 17.864566924s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:15:50.834972 1507580 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:15:50.835076 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:15:50.835100 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:50.835122 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:50.835141 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:50.837992 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:50.838026 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:50.838036 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:50 GMT
	I0916 11:15:50.838040 1507580 round_trippers.go:580]     Audit-Id: 990cf2a2-aef9-4542-93cc-aebb9a19b890
	I0916 11:15:50.838044 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:50.838047 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:50.838050 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:50.838055 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:50.838407 1507580 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1072"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time": [truncated 13733 chars]
	I0916 11:15:50.839183 1507580 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:15:50.839229 1507580 node_conditions.go:123] node cpu capacity is 2
	I0916 11:15:50.839242 1507580 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:15:50.839253 1507580 node_conditions.go:123] node cpu capacity is 2
	I0916 11:15:50.839259 1507580 node_conditions.go:105] duration metric: took 4.270861ms to run NodePressure ...
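The NodePressure step reads each node's reported capacity; this run prints 203034800Ki of ephemeral storage and 2 CPUs twice because the NodeList carries two nodes. The capacities arrive as Kubernetes resource quantities; below is a sketch of parsing those literals with k8s.io/apimachinery. The non-zero check is illustrative, not necessarily minikube's exact validation.

// node_capacity.go — hedged sketch of parsing the capacities above.
package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/api/resource"
)

func main() {
	storage := resource.MustParse("203034800Ki") // ephemeral-storage from the log
	cpu := resource.MustParse("2")               // cpu capacity from the log

	// A node that reported zero for either value would be suspect.
	fmt.Printf("ephemeral-storage: %s (%d bytes), ok=%v\n",
		storage.String(), storage.Value(), !storage.IsZero())
	fmt.Printf("cpu: %s, ok=%v\n", cpu.String(), !cpu.IsZero())
}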
	I0916 11:15:50.839270 1507580 start.go:241] waiting for startup goroutines ...
	I0916 11:15:50.839283 1507580 start.go:246] waiting for cluster config update ...
	I0916 11:15:50.839291 1507580 start.go:255] writing updated cluster config ...
	I0916 11:15:50.842682 1507580 out.go:201] 
	I0916 11:15:50.845516 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:50.845642 1507580 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:15:50.848889 1507580 out.go:177] * Starting "multinode-654612-m02" worker node in "multinode-654612" cluster
	I0916 11:15:50.851452 1507580 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:15:50.854074 1507580 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:15:50.856767 1507580 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:15:50.856810 1507580 cache.go:56] Caching tarball of preloaded images
	I0916 11:15:50.856928 1507580 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:15:50.856963 1507580 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:15:50.856976 1507580 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:15:50.857131 1507580 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	W0916 11:15:50.876307 1507580 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:15:50.876327 1507580 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:15:50.876404 1507580 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:15:50.876429 1507580 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:15:50.876438 1507580 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:15:50.876447 1507580 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:15:50.876457 1507580 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:15:50.877768 1507580 image.go:273] response: 
	I0916 11:15:51.002541 1507580 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:15:51.002624 1507580 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:15:51.002660 1507580 start.go:360] acquireMachinesLock for multinode-654612-m02: {Name:mk70904bbc860a548c4a9726b7d64e227f1f9cac Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:15:51.002735 1507580 start.go:364] duration metric: took 54.35µs to acquireMachinesLock for "multinode-654612-m02"
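acquireMachinesLock serializes work on one machine at a time; the struct printed above shows a named lock with a 500ms retry delay and a 10-minute timeout, i.e. a cross-process lock. An in-process sketch of the same shape follows; a real implementation must also exclude other minikube processes, which a map of mutexes cannot.

// machines_lock.go — hedged in-process sketch of a per-machine named lock.
package main

import (
	"fmt"
	"sync"
)

var (
	registry   = map[string]*sync.Mutex{}
	registryMu sync.Mutex
)

// acquire returns the lock for name, creating it on first use, and
// blocks until the lock is held.
func acquire(name string) *sync.Mutex {
	registryMu.Lock()
	l, ok := registry[name]
	if !ok {
		l = &sync.Mutex{}
		registry[name] = l
	}
	registryMu.Unlock()
	l.Lock()
	return l
}

func main() {
	l := acquire("multinode-654612-m02")
	defer l.Unlock()
	fmt.Println("holding machines lock for multinode-654612-m02")
}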
	I0916 11:15:51.002759 1507580 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:15:51.002765 1507580 fix.go:54] fixHost starting: m02
	I0916 11:15:51.003399 1507580 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:15:51.026129 1507580 fix.go:112] recreateIfNeeded on multinode-654612-m02: state=Stopped err=<nil>
	W0916 11:15:51.026164 1507580 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:15:51.029970 1507580 out.go:177] * Restarting existing docker container for "multinode-654612-m02" ...
	I0916 11:15:51.032839 1507580 cli_runner.go:164] Run: docker start multinode-654612-m02
	I0916 11:15:51.339164 1507580 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:15:51.364057 1507580 kic.go:430] container "multinode-654612-m02" state is running.
	I0916 11:15:51.365546 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:15:51.386943 1507580 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/config.json ...
	I0916 11:15:51.387451 1507580 machine.go:93] provisionDockerMachine start ...
	I0916 11:15:51.387525 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:51.414431 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:51.414746 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34778 <nil> <nil>}
	I0916 11:15:51.414756 1507580 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:15:51.415691 1507580 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:15:54.556147 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
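The "Error dialing TCP: ssh: handshake failed: EOF" above is expected: sshd inside the just-restarted container is not accepting connections yet, and the same dial succeeds about three seconds later. A sketch of that dial-and-retry with golang.org/x/crypto/ssh, using the address and key path from this log and an assumed 10x1s retry policy:

// ssh_retry.go — hedged sketch of the dial-and-retry visible above.
package main

import (
	"fmt"
	"os"
	"time"

	"golang.org/x/crypto/ssh"
)

func main() {
	key, err := os.ReadFile("/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa")
	if err != nil {
		panic(err)
	}
	signer, err := ssh.ParsePrivateKey(key)
	if err != nil {
		panic(err)
	}
	cfg := &ssh.ClientConfig{
		User:            "docker",
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // sketch only
		Timeout:         5 * time.Second,
	}
	for attempt := 1; attempt <= 10; attempt++ {
		client, err := ssh.Dial("tcp", "127.0.0.1:34778", cfg)
		if err != nil {
			// Early attempts fail with "ssh: handshake failed: EOF"
			// until sshd in the container is up.
			fmt.Printf("attempt %d: %v\n", attempt, err)
			time.Sleep(time.Second)
			continue
		}
		defer client.Close()
		fmt.Println("connected")
		return
	}
	fmt.Fprintln(os.Stderr, "gave up dialing sshd")
}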
	I0916 11:15:54.556176 1507580 ubuntu.go:169] provisioning hostname "multinode-654612-m02"
	I0916 11:15:54.556246 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:54.573025 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:54.573261 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34778 <nil> <nil>}
	I0916 11:15:54.573278 1507580 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-654612-m02 && echo "multinode-654612-m02" | sudo tee /etc/hostname
	I0916 11:15:54.719936 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-654612-m02
	
	I0916 11:15:54.720029 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:54.740586 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:54.740859 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34778 <nil> <nil>}
	I0916 11:15:54.740883 1507580 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-654612-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-654612-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-654612-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:15:54.880813 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:15:54.880840 1507580 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:15:54.880858 1507580 ubuntu.go:177] setting up certificates
	I0916 11:15:54.880867 1507580 provision.go:84] configureAuth start
	I0916 11:15:54.880929 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:15:54.900132 1507580 provision.go:143] copyHostCerts
	I0916 11:15:54.900180 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:15:54.900215 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:15:54.900225 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:15:54.900303 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:15:54.900389 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:15:54.900412 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:15:54.900417 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:15:54.900448 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:15:54.900496 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:15:54.900515 1507580 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:15:54.900525 1507580 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:15:54.900551 1507580 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:15:54.900649 1507580 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.multinode-654612-m02 san=[127.0.0.1 192.168.67.3 localhost minikube multinode-654612-m02]
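The server cert generated above carries the SANs from the log: 127.0.0.1, 192.168.67.3, localhost, minikube, and multinode-654612-m02. A crypto/x509 sketch of the same shape follows; for brevity it self-signs, whereas the real step signs with the minikube CA key.

// server_cert.go — hedged sketch of the server-cert generation above.
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.multinode-654612-m02"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().AddDate(1, 0, 0),
		// SANs copied from the provision.go line above.
		IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.67.3")},
		DNSNames:    []string{"localhost", "minikube", "multinode-654612-m02"},
		KeyUsage:    x509.KeyUsageDigitalSignature,
		ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
	}
	// Self-signed: the template doubles as the parent. The real code
	// passes the CA certificate and CA key here instead.
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}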
	I0916 11:15:55.294488 1507580 provision.go:177] copyRemoteCerts
	I0916 11:15:55.294563 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:15:55.294605 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:55.311380 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34778 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:15:55.409889 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:15:55.409951 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:15:55.441894 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:15:55.441994 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:15:55.470998 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:15:55.471066 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:15:55.504954 1507580 provision.go:87] duration metric: took 624.06564ms to configureAuth
	I0916 11:15:55.504996 1507580 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:15:55.505339 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:55.505506 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:55.521949 1507580 main.go:141] libmachine: Using SSH client type: native
	I0916 11:15:55.522207 1507580 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34778 <nil> <nil>}
	I0916 11:15:55.522228 1507580 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:15:55.794380 1507580 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:15:55.794405 1507580 machine.go:96] duration metric: took 4.406937752s to provisionDockerMachine
	I0916 11:15:55.794421 1507580 start.go:293] postStartSetup for "multinode-654612-m02" (driver="docker")
	I0916 11:15:55.794433 1507580 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:15:55.794496 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:15:55.794544 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:55.816659 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34778 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:15:55.914310 1507580 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:15:55.920817 1507580 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:15:55.920836 1507580 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:15:55.920842 1507580 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:15:55.920847 1507580 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:15:55.920852 1507580 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:15:55.920856 1507580 command_runner.go:130] > ID=ubuntu
	I0916 11:15:55.920859 1507580 command_runner.go:130] > ID_LIKE=debian
	I0916 11:15:55.920864 1507580 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:15:55.920868 1507580 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:15:55.920874 1507580 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:15:55.920881 1507580 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:15:55.920885 1507580 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:15:55.920929 1507580 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:15:55.920954 1507580 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:15:55.920964 1507580 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:15:55.920971 1507580 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:15:55.920981 1507580 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:15:55.921039 1507580 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:15:55.921125 1507580 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:15:55.921132 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:15:55.921228 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:15:55.930114 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:15:55.955341 1507580 start.go:296] duration metric: took 160.896452ms for postStartSetup
	I0916 11:15:55.955486 1507580 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:15:55.955541 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:55.973342 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34778 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:15:56.065056 1507580 command_runner.go:130] > 12%
	I0916 11:15:56.065447 1507580 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:15:56.069758 1507580 command_runner.go:130] > 171G
	I0916 11:15:56.070307 1507580 fix.go:56] duration metric: took 5.067537245s for fixHost
	I0916 11:15:56.070327 1507580 start.go:83] releasing machines lock for "multinode-654612-m02", held for 5.067582922s
	I0916 11:15:56.070399 1507580 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:15:56.090954 1507580 out.go:177] * Found network options:
	I0916 11:15:56.093595 1507580 out.go:177]   - NO_PROXY=192.168.67.2
	W0916 11:15:56.096308 1507580 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:15:56.096356 1507580 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:15:56.096430 1507580 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:15:56.096483 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:56.096778 1507580 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:15:56.096848 1507580 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:15:56.123286 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34778 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:15:56.139955 1507580 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34778 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:15:56.381175 1507580 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:15:56.412626 1507580 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:15:56.416623 1507580 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf.mk_disabled
	I0916 11:15:56.416660 1507580 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:15:56.416716 1507580 command_runner.go:130] > Device: c2h/194d	Inode: 1570512     Links: 1
	I0916 11:15:56.416732 1507580 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:15:56.416738 1507580 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:15:56.416743 1507580 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:15:56.416751 1507580 command_runner.go:130] > Change: 2024-09-16 11:11:09.805260925 +0000
	I0916 11:15:56.416757 1507580 command_runner.go:130] >  Birth: 2024-09-16 11:11:09.801261023 +0000
	I0916 11:15:56.417005 1507580 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:15:56.425907 1507580 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:15:56.425985 1507580 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:15:56.435425 1507580 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:15:56.435449 1507580 start.go:495] detecting cgroup driver to use...
	I0916 11:15:56.435481 1507580 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:15:56.435532 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:15:56.448248 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:15:56.460926 1507580 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:15:56.460999 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:15:56.475154 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:15:56.488161 1507580 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:15:56.586836 1507580 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:15:56.688463 1507580 docker.go:233] disabling docker service ...
	I0916 11:15:56.688567 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:15:56.701174 1507580 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:15:56.712841 1507580 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:15:56.801180 1507580 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:15:56.896560 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
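The systemctl sequence above hands the CRI socket to CRI-O by stopping, disabling, and masking the cri-docker and docker units. A local sketch of the same sequence, where exec.Command stands in for the ssh_runner in the log; errors are printed rather than fatal because a unit may simply not exist on the image:

// disable_docker.go — hedged sketch of the service shutdown above.
package main

import (
	"fmt"
	"os/exec"
)

// run executes one sudo command and reports its outcome, mirroring the
// tolerant style of the log: a failed stop/disable is logged, not fatal.
func run(args ...string) {
	out, err := exec.Command("sudo", args...).CombinedOutput()
	fmt.Printf("sudo %v: err=%v out=%s\n", args, err, out)
}

func main() {
	run("systemctl", "stop", "-f", "docker.socket")
	run("systemctl", "stop", "-f", "docker.service")
	run("systemctl", "disable", "docker.socket")
	run("systemctl", "mask", "docker.service")
	run("systemctl", "is-active", "--quiet", "service", "docker")
}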
	I0916 11:15:56.909520 1507580 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:15:56.927082 1507580 command_runner.go:130] > runtime-endpoint: unix:///var/run/crio/crio.sock
	I0916 11:15:56.927132 1507580 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:15:56.927241 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:56.940170 1507580 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:15:56.940295 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:56.953108 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:56.964577 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:56.974843 1507580 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:15:56.987306 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:57.001042 1507580 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:57.014686 1507580 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:15:57.026068 1507580 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:15:57.034330 1507580 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:15:57.035544 1507580 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:15:57.045222 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:57.136021 1507580 ssh_runner.go:195] Run: sudo systemctl restart crio
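The run of sed commands above edits /etc/crio/crio.conf.d/02-crio.conf in place: pinning the pause image to registry.k8s.io/pause:3.10, switching the cgroup manager to cgroupfs, and opening unprivileged ports via default_sysctls, before the daemon-reload and restart. A Go sketch of the first two rewrites; the starting file contents below are an assumption for illustration, not taken from this run.

// crio_conf_edit.go — hedged sketch of the sed-style config rewrites above.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Assumed starting contents of 02-crio.conf.
	conf := "[crio.image]\npause_image = \"registry.k8s.io/pause:3.9\"\n\n[crio.runtime]\ncgroup_manager = \"systemd\"\n"

	// sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|'
	conf = regexp.MustCompile(`(?m)^.*pause_image = .*$`).
		ReplaceAllString(conf, `pause_image = "registry.k8s.io/pause:3.10"`)
	// sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|'
	conf = regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`).
		ReplaceAllString(conf, `cgroup_manager = "cgroupfs"`)
	fmt.Print(conf)
}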
	I0916 11:15:57.278360 1507580 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:15:57.278492 1507580 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:15:57.282204 1507580 command_runner.go:130] >   File: /var/run/crio/crio.sock
	I0916 11:15:57.282264 1507580 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:15:57.282296 1507580 command_runner.go:130] > Device: cbh/203d	Inode: 190         Links: 1
	I0916 11:15:57.282318 1507580 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:15:57.282338 1507580 command_runner.go:130] > Access: 2024-09-16 11:15:57.262214586 +0000
	I0916 11:15:57.282367 1507580 command_runner.go:130] > Modify: 2024-09-16 11:15:57.262214586 +0000
	I0916 11:15:57.282388 1507580 command_runner.go:130] > Change: 2024-09-16 11:15:57.262214586 +0000
	I0916 11:15:57.282406 1507580 command_runner.go:130] >  Birth: -
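After restarting CRI-O, the log allots a 60-second budget for /var/run/crio/crio.sock to appear, then confirms it with stat. A sketch of that wait; the 250ms poll interval is an assumption, while the 60s budget and the socket's srw-rw---- mode are from the log:

// wait_socket.go — hedged sketch of the socket wait above.
package main

import (
	"fmt"
	"os"
	"time"
)

func main() {
	deadline := time.Now().Add(60 * time.Second)
	for time.Now().Before(deadline) {
		if fi, err := os.Stat("/var/run/crio/crio.sock"); err == nil {
			fmt.Println("socket ready, mode:", fi.Mode())
			return
		}
		time.Sleep(250 * time.Millisecond)
	}
	fmt.Fprintln(os.Stderr, "timed out waiting for crio.sock")
	os.Exit(1)
}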
	I0916 11:15:57.282682 1507580 start.go:563] Will wait 60s for crictl version
	I0916 11:15:57.282774 1507580 ssh_runner.go:195] Run: which crictl
	I0916 11:15:57.286872 1507580 command_runner.go:130] > /usr/bin/crictl
	I0916 11:15:57.287358 1507580 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:15:57.332211 1507580 command_runner.go:130] > Version:  0.1.0
	I0916 11:15:57.332283 1507580 command_runner.go:130] > RuntimeName:  cri-o
	I0916 11:15:57.332302 1507580 command_runner.go:130] > RuntimeVersion:  1.24.6
	I0916 11:15:57.332323 1507580 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:15:57.335132 1507580 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:15:57.335268 1507580 ssh_runner.go:195] Run: crio --version
	I0916 11:15:57.373929 1507580 command_runner.go:130] > crio version 1.24.6
	I0916 11:15:57.373996 1507580 command_runner.go:130] > Version:          1.24.6
	I0916 11:15:57.374020 1507580 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:15:57.374032 1507580 command_runner.go:130] > GitTreeState:     clean
	I0916 11:15:57.374040 1507580 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:15:57.374044 1507580 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:15:57.374048 1507580 command_runner.go:130] > Compiler:         gc
	I0916 11:15:57.374052 1507580 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:15:57.374074 1507580 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:15:57.374087 1507580 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:15:57.374092 1507580 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:15:57.374101 1507580 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:15:57.374193 1507580 ssh_runner.go:195] Run: crio --version
	I0916 11:15:57.424788 1507580 command_runner.go:130] > crio version 1.24.6
	I0916 11:15:57.424813 1507580 command_runner.go:130] > Version:          1.24.6
	I0916 11:15:57.424839 1507580 command_runner.go:130] > GitCommit:        4bfe15a9feb74ffc95e66a21c04b15fa7bbc2b90
	I0916 11:15:57.424844 1507580 command_runner.go:130] > GitTreeState:     clean
	I0916 11:15:57.424857 1507580 command_runner.go:130] > BuildDate:        2023-06-14T14:44:50Z
	I0916 11:15:57.424864 1507580 command_runner.go:130] > GoVersion:        go1.18.2
	I0916 11:15:57.424869 1507580 command_runner.go:130] > Compiler:         gc
	I0916 11:15:57.424878 1507580 command_runner.go:130] > Platform:         linux/arm64
	I0916 11:15:57.424884 1507580 command_runner.go:130] > Linkmode:         dynamic
	I0916 11:15:57.424893 1507580 command_runner.go:130] > BuildTags:        apparmor, exclude_graphdriver_devicemapper, containers_image_ostree_stub, seccomp
	I0916 11:15:57.424902 1507580 command_runner.go:130] > SeccompEnabled:   true
	I0916 11:15:57.424906 1507580 command_runner.go:130] > AppArmorEnabled:  false
	I0916 11:15:57.431884 1507580 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:15:57.434669 1507580 out.go:177]   - env NO_PROXY=192.168.67.2
	I0916 11:15:57.437298 1507580 cli_runner.go:164] Run: docker network inspect multinode-654612 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:15:57.454521 1507580 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0916 11:15:57.458482 1507580 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
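The bash one-liner above updates /etc/hosts idempotently: grep -v drops any existing line for host.minikube.internal, the fresh mapping is appended, and the result is copied back into place with sudo (the same pattern recurs below for control-plane.minikube.internal). A rough Go equivalent of the read-filter-append step, assuming a local file rather than an SSH runner:

package main

import (
	"os"
	"strings"
)

// ensureHostsEntry drops any line already mapping name and appends "ip<TAB>name".
func ensureHostsEntry(path, ip, name string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var kept []string
	for _, line := range strings.Split(strings.TrimRight(string(data), "\n"), "\n") {
		if !strings.HasSuffix(line, "\t"+name) {
			kept = append(kept, line)
		}
	}
	kept = append(kept, ip+"\t"+name)
	return os.WriteFile(path, []byte(strings.Join(kept, "\n")+"\n"), 0644)
}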
	I0916 11:15:57.471394 1507580 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:15:57.471658 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:57.471953 1507580 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:15:57.489526 1507580 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:15:57.489850 1507580 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612 for IP: 192.168.67.3
	I0916 11:15:57.489863 1507580 certs.go:194] generating shared ca certs ...
	I0916 11:15:57.489880 1507580 certs.go:226] acquiring lock for ca certs: {Name:mk0ae46b50e2e49d53ad6fcc94535aa50d9156d6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:15:57.489996 1507580 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key
	I0916 11:15:57.490083 1507580 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key
	I0916 11:15:57.490101 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:15:57.490116 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:15:57.490134 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:15:57.490148 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:15:57.490201 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem (1338 bytes)
	W0916 11:15:57.490235 1507580 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833_empty.pem, impossibly tiny 0 bytes
	I0916 11:15:57.490248 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:15:57.490272 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem (1078 bytes)
	I0916 11:15:57.490298 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:15:57.490320 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem (1679 bytes)
	I0916 11:15:57.490366 1507580 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:15:57.490396 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:57.490415 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem -> /usr/share/ca-certificates/1383833.pem
	I0916 11:15:57.490429 1507580 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /usr/share/ca-certificates/13838332.pem
	I0916 11:15:57.490450 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:15:57.518839 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0916 11:15:57.545083 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:15:57.570697 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0916 11:15:57.594917 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:15:57.620506 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/1383833.pem --> /usr/share/ca-certificates/1383833.pem (1338 bytes)
	I0916 11:15:57.648274 1507580 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /usr/share/ca-certificates/13838332.pem (1708 bytes)
	I0916 11:15:57.673828 1507580 ssh_runner.go:195] Run: openssl version
	I0916 11:15:57.678921 1507580 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:15:57.679353 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:15:57.689922 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:57.693518 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:57.693631 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:35 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:57.693703 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:15:57.700387 1507580 command_runner.go:130] > b5213941
	I0916 11:15:57.700863 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:15:57.711247 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1383833.pem && ln -fs /usr/share/ca-certificates/1383833.pem /etc/ssl/certs/1383833.pem"
	I0916 11:15:57.721103 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1383833.pem
	I0916 11:15:57.725117 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:15:57.725514 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:46 /usr/share/ca-certificates/1383833.pem
	I0916 11:15:57.725596 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1383833.pem
	I0916 11:15:57.733920 1507580 command_runner.go:130] > 51391683
	I0916 11:15:57.733997 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1383833.pem /etc/ssl/certs/51391683.0"
	I0916 11:15:57.743272 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/13838332.pem && ln -fs /usr/share/ca-certificates/13838332.pem /etc/ssl/certs/13838332.pem"
	I0916 11:15:57.753188 1507580 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/13838332.pem
	I0916 11:15:57.757167 1507580 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:15:57.757222 1507580 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:46 /usr/share/ca-certificates/13838332.pem
	I0916 11:15:57.757291 1507580 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/13838332.pem
	I0916 11:15:57.764149 1507580 command_runner.go:130] > 3ec20f2e
	I0916 11:15:57.764572 1507580 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/13838332.pem /etc/ssl/certs/3ec20f2e.0"
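The three openssl x509 -hash -noout runs above print each certificate's subject-name hash (b5213941, 51391683, 3ec20f2e); OpenSSL resolves trust lookups in /etc/ssl/certs through symlinks named <hash>.0, which the subsequent ln -fs commands create. A small sketch of that hash-and-link step, assuming openssl is on PATH and error handling is trimmed:

package main

import (
	"os"
	"os/exec"
	"strings"
)

// linkCert symlinks /etc/ssl/certs/<subject-hash>.0 at the given certificate,
// mirroring the "openssl x509 -hash -noout" + "ln -fs" sequence in the log.
func linkCert(certPath string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
	if err != nil {
		return err
	}
	link := "/etc/ssl/certs/" + strings.TrimSpace(string(out)) + ".0"
	os.Remove(link) // ignore error; emulate the -f in ln -fs
	return os.Symlink(certPath, link)
}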
	I0916 11:15:57.773782 1507580 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:15:57.777221 1507580 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:15:57.777265 1507580 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:15:57.777299 1507580 kubeadm.go:934] updating node {m02 192.168.67.3 8443 v1.31.1 crio false true} ...
	I0916 11:15:57.777394 1507580 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=multinode-654612-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-654612 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
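In the generated drop-in above, the empty ExecStart= line before the populated one is the standard systemd idiom for clearing an inherited ExecStart before overriding it. A sketch of rendering such a drop-in with text/template (the struct fields are hypothetical, and the flag set is abbreviated from the log; this is not minikube's actual template):

package main

import (
	"os"
	"text/template"
)

const kubeletDropIn = `[Unit]
Wants=crio.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.KubernetesVersion}}/kubelet --hostname-override={{.NodeName}} --node-ip={{.NodeIP}} --kubeconfig=/etc/kubernetes/kubelet.conf

[Install]
`

func main() {
	t := template.Must(template.New("kubelet").Parse(kubeletDropIn))
	_ = t.Execute(os.Stdout, struct {
		KubernetesVersion, NodeName, NodeIP string
	}{"v1.31.1", "multinode-654612-m02", "192.168.67.3"})
}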
	I0916 11:15:57.777461 1507580 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:15:57.785531 1507580 command_runner.go:130] > kubeadm
	I0916 11:15:57.785552 1507580 command_runner.go:130] > kubectl
	I0916 11:15:57.785556 1507580 command_runner.go:130] > kubelet
	I0916 11:15:57.786794 1507580 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:15:57.786859 1507580 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:15:57.795419 1507580 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (370 bytes)
	I0916 11:15:57.813856 1507580 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
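"scp memory --> ..." indicates the rendered unit files are streamed from an in-memory buffer rather than copied from a local file. One plausible way to do that over an established SSH connection, sketched with golang.org/x/crypto/ssh (client setup omitted; piping through sudo tee is an assumption here, not necessarily what minikube runs):

package main

import (
	"bytes"

	"golang.org/x/crypto/ssh"
)

// writeRemoteFile streams data into a root-owned path on the node over an
// existing SSH connection, roughly what "scp memory --> <path>" suggests.
func writeRemoteFile(client *ssh.Client, data []byte, path string) error {
	session, err := client.NewSession()
	if err != nil {
		return err
	}
	defer session.Close()
	session.Stdin = bytes.NewReader(data)
	return session.Run("sudo tee " + path + " >/dev/null")
}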
	I0916 11:15:57.833551 1507580 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:15:57.837112 1507580 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:15:57.848052 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:57.940207 1507580 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:15:57.952975 1507580 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:false Worker:true}
	I0916 11:15:57.953375 1507580 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:57.956475 1507580 out.go:177] * Verifying Kubernetes components...
	I0916 11:15:57.960093 1507580 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:15:58.059940 1507580 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:15:58.072851 1507580 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:15:58.073372 1507580 kapi.go:59] client config for multinode-654612: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/multinode-654612/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
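The rest.Config dump above is what gets derived from the profile's kubeconfig. A minimal client-go equivalent that loads the same kubeconfig and builds a clientset (kubeconfig path and API server address taken from the log):

package main

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/19651-1378450/kubeconfig")
	if err != nil {
		panic(err)
	}
	clientset, err := kubernetes.NewForConfig(cfg) // talks to https://192.168.67.2:8443
	if err != nil {
		panic(err)
	}
	_ = clientset
}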
	I0916 11:15:58.074190 1507580 node_ready.go:35] waiting up to 6m0s for node "multinode-654612-m02" to be "Ready" ...
	I0916 11:15:58.074290 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:15:58.074302 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.074317 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.074321 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.084912 1507580 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 11:15:58.084945 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.084954 1507580 round_trippers.go:580]     Audit-Id: aef2e752-1e80-4483-8587-1956c19e8c9e
	I0916 11:15:58.084958 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.084961 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.084993 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.084996 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.085001 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.085150 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"837","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:
annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"manag [truncated 6341 chars]
	I0916 11:15:58.085636 1507580 node_ready.go:49] node "multinode-654612-m02" has status "Ready":"True"
	I0916 11:15:58.085656 1507580 node_ready.go:38] duration metric: took 11.439841ms for node "multinode-654612-m02" to be "Ready" ...
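The node_ready wait above resolved in one request because the node object already reported Ready=True. In general this step is a poll of the node's Ready condition; a sketch using client-go and the standard wait helper (the 6m timeout is from the log, the 500ms interval is an assumption):

package main

import (
	"context"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

// waitNodeReady polls until the node reports Ready=True or the timeout elapses.
func waitNodeReady(cs *kubernetes.Clientset, name string) error {
	return wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
			if err != nil {
				return false, nil // treat API errors as transient and keep polling
			}
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
}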
	I0916 11:15:58.085667 1507580 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:15:58.085748 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:15:58.085755 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.085772 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.085777 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.089435 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:58.089457 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.089466 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.089469 1507580 round_trippers.go:580]     Audit-Id: 2da7ad66-ea33-4946-bd09-1fc85f146703
	I0916 11:15:58.089472 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.089475 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.089478 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.089481 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.090022 1507580 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1074"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"
f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers": [truncated 90698 chars]
	I0916 11:15:58.094133 1507580 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:15:58.094240 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:15:58.094255 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.094264 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.094267 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.096824 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:58.096844 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.096852 1507580 round_trippers.go:580]     Audit-Id: 0bc95727-76ef-4c9f-9c06-9dd493c268dc
	I0916 11:15:58.096856 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.096859 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.096862 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.096865 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.096868 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.097061 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:15:58.097670 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:58.097693 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.097702 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.097708 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.099870 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:58.099889 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.099896 1507580 round_trippers.go:580]     Audit-Id: ae9f4892-3e98-48d2-87b4-a8d5b0d0bded
	I0916 11:15:58.099906 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.099909 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.099912 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.099915 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.099917 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.100305 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:58.595051 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:15:58.595079 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.595090 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.595099 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.598033 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:58.598057 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.598066 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.598070 1507580 round_trippers.go:580]     Audit-Id: b1ca37a6-922c-4b78-904a-d748d80c5034
	I0916 11:15:58.598073 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.598094 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.598098 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.598101 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.598359 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:15:58.599036 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:58.599055 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:58.599065 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:58.599071 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:58.601252 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:58.601276 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:58.601284 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:58.601290 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:58.601293 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:58 GMT
	I0916 11:15:58.601296 1507580 round_trippers.go:580]     Audit-Id: c15d20af-f630-4c11-abb2-6877a3f6255e
	I0916 11:15:58.601321 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:58.601330 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:58.601481 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:59.094607 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:15:59.094633 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:59.094643 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:59.094648 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:59.098005 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:15:59.098044 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:59.098053 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:59.098059 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:59.098068 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:59.098071 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:59 GMT
	I0916 11:15:59.098075 1507580 round_trippers.go:580]     Audit-Id: c18d3789-058f-4a3c-b5d2-2b7dfcbaaa07
	I0916 11:15:59.098078 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:59.098204 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:15:59.098788 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:59.098808 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:59.098818 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:59.098821 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:59.101172 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:59.101208 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:59.101215 1507580 round_trippers.go:580]     Audit-Id: 8def53dd-a014-430d-9249-cab1dd805bce
	I0916 11:15:59.101219 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:59.101222 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:59.101225 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:59.101229 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:59.101232 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:59 GMT
	I0916 11:15:59.101754 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:15:59.594333 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:15:59.594357 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:59.594367 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:59.594373 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:59.596775 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:15:59.596795 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:59.596860 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:59.596874 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:59.596879 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:59 GMT
	I0916 11:15:59.596883 1507580 round_trippers.go:580]     Audit-Id: 698cc462-e19c-412a-a17e-c3cfdbb1007a
	I0916 11:15:59.596887 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:59.596890 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:59.597263 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:15:59.597837 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:15:59.597856 1507580 round_trippers.go:469] Request Headers:
	I0916 11:15:59.597865 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:15:59.597871 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:15:59.599795 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:15:59.599817 1507580 round_trippers.go:577] Response Headers:
	I0916 11:15:59.599825 1507580 round_trippers.go:580]     Audit-Id: 1d7fb31a-978e-411c-a2eb-836f6832d624
	I0916 11:15:59.599830 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:15:59.599833 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:15:59.599835 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:15:59.599838 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:15:59.599841 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:15:59 GMT
	I0916 11:15:59.600103 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:00.095405 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:00.095488 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:00.095515 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:00.095537 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:00.099143 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:00.099172 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:00.099181 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:00.099186 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:00.099190 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:00 GMT
	I0916 11:16:00.099195 1507580 round_trippers.go:580]     Audit-Id: d00bd085-4635-4343-a5f5-e922b9c6d44a
	I0916 11:16:00.099199 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:00.099202 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:00.099951 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:00.100646 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:00.100662 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:00.100706 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:00.100713 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:00.159001 1507580 round_trippers.go:574] Response Status: 200 OK in 58 milliseconds
	I0916 11:16:00.159044 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:00.159055 1507580 round_trippers.go:580]     Audit-Id: 0e483f1b-f194-4cdc-9efe-b40b5c674d70
	I0916 11:16:00.159060 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:00.159063 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:00.159068 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:00.159071 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:00.159074 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:00 GMT
	I0916 11:16:00.159370 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:00.159927 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
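The repeated GET pairs above are one poll iteration each: fetch the coredns pod, inspect its Ready condition, then fetch the node it runs on; here the pod still reports Ready=False, so polling continues roughly every 500ms. The condition test itself is small (sketch; this is the standard PodReady check, not necessarily minikube's exact helper):

package main

import corev1 "k8s.io/api/core/v1"

// isPodReady reports whether the pod's PodReady condition is True.
func isPodReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}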
	I0916 11:16:00.594445 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:00.594472 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:00.594482 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:00.594488 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:00.597468 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:00.597497 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:00.597506 1507580 round_trippers.go:580]     Audit-Id: ed0651f4-3305-49f4-8b0f-5b6e5a93d467
	I0916 11:16:00.597510 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:00.597514 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:00.597549 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:00.597559 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:00.597562 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:00 GMT
	I0916 11:16:00.597839 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:00.598582 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:00.598611 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:00.598622 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:00.598629 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:00.601215 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:00.601242 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:00.601255 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:00.601259 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:00.601262 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:00.601266 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:00 GMT
	I0916 11:16:00.601269 1507580 round_trippers.go:580]     Audit-Id: a6ae753d-60b5-4894-bb72-c83cd84005ef
	I0916 11:16:00.601272 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:00.601557 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:01.095247 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:01.095275 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:01.095285 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:01.095292 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:01.097975 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:01.098070 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:01.098098 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:01.098133 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:01.098157 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:01.098168 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:01.098172 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:01 GMT
	I0916 11:16:01.098175 1507580 round_trippers.go:580]     Audit-Id: d22b2f12-cc3f-45cf-be0b-40fffcb899f0
	I0916 11:16:01.098288 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:01.098860 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:01.098881 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:01.098890 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:01.098896 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:01.101080 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:01.101106 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:01.101114 1507580 round_trippers.go:580]     Audit-Id: 6522e8f0-5698-48ea-bb30-50725375ab33
	I0916 11:16:01.101118 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:01.101123 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:01.101126 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:01.101130 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:01.101133 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:01 GMT
	I0916 11:16:01.101426 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:01.595186 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:01.595264 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:01.595290 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:01.595310 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:01.598033 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:01.598108 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:01.598132 1507580 round_trippers.go:580]     Audit-Id: 2154d01a-19df-4ad6-9b8e-5e88a25bb385
	I0916 11:16:01.598150 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:01.598182 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:01.598205 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:01.598224 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:01.598241 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:01 GMT
	I0916 11:16:01.599097 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:01.599819 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:01.599868 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:01.599904 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:01.599921 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:01.602375 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:01.602397 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:01.602406 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:01.602411 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:01 GMT
	I0916 11:16:01.602414 1507580 round_trippers.go:580]     Audit-Id: 8f773e71-0582-47b9-8308-84aca5c6b26c
	I0916 11:16:01.602418 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:01.602420 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:01.602423 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:01.602933 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:02.094442 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:02.094470 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:02.094480 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:02.094485 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:02.097125 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:02.097211 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:02.097240 1507580 round_trippers.go:580]     Audit-Id: 12098e8b-7deb-4b36-8ea3-cdae9e91cf29
	I0916 11:16:02.097274 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:02.097294 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:02.097350 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:02.097377 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:02.097397 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:02 GMT
	I0916 11:16:02.097582 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:02.098235 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:02.098263 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:02.098273 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:02.098283 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:02.100755 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:02.100783 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:02.100793 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:02.100797 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:02.100801 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:02 GMT
	I0916 11:16:02.100805 1507580 round_trippers.go:580]     Audit-Id: 4d8842f5-02f0-4322-b8d6-42f5766b10fd
	I0916 11:16:02.100808 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:02.100812 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:02.100980 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:02.595195 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:02.595225 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:02.595235 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:02.595241 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:02.597832 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:02.597860 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:02.597869 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:02.597872 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:02.597875 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:02.597878 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:02 GMT
	I0916 11:16:02.597881 1507580 round_trippers.go:580]     Audit-Id: f628ff9a-bbbe-46b3-8c96-792dff63ce10
	I0916 11:16:02.597884 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:02.598296 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:02.598940 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:02.598956 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:02.598965 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:02.598970 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:02.601598 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:02.601679 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:02.601702 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:02.601724 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:02.601757 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:02.601767 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:02.601771 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:02 GMT
	I0916 11:16:02.601774 1507580 round_trippers.go:580]     Audit-Id: 2fb00a26-53b5-4191-9432-bc059af17d1c
	I0916 11:16:02.601968 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:02.602606 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
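
The block above is one iteration of minikube's readiness poll: roughly every 500 ms it GETs the coredns pod, then GETs the node it is scheduled on, and logs the pod_ready.go:103 line while the pod's Ready condition stays False. Below is a minimal client-go sketch of that loop for context; the helper names, timeout, and structure are illustrative, not minikube's actual pod_ready implementation.

```go
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True — the
// check that keeps printing "Ready":"False" in the log above.
func podReady(p *corev1.Pod) bool {
	for _, c := range p.Status.Conditions {
		if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
			return true
		}
	}
	return false
}

// nodeReady reports whether the node's Ready condition is True; the
// node GET paired with each pod GET in the log serves this kind of check.
func nodeReady(n *corev1.Node) bool {
	for _, c := range n.Status.Conditions {
		if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
			return true
		}
	}
	return false
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Poll on the ~500ms cadence visible in the timestamps above until
	// both the pod and its node report Ready, or the timeout expires.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			pod, err := client.CoreV1().Pods("kube-system").Get(ctx, "coredns-7c65d6cfc9-szvv9", metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			node, err := client.CoreV1().Nodes().Get(ctx, pod.Spec.NodeName, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			return podReady(pod) && nodeReady(node), nil
		})
	fmt.Println("coredns ready:", err == nil)
}
```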
	I0916 11:16:03.094638 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:03.094666 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:03.094676 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:03.094682 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:03.097188 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:03.097212 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:03.097220 1507580 round_trippers.go:580]     Audit-Id: 676f24fa-f636-4efc-8da2-90600d945786
	I0916 11:16:03.097225 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:03.097228 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:03.097232 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:03.097236 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:03.097239 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:03 GMT
	I0916 11:16:03.097552 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:03.098186 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:03.098205 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:03.098214 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:03.098220 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:03.100195 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:03.100217 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:03.100225 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:03.100230 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:03.100234 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:03.100238 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:03 GMT
	I0916 11:16:03.100250 1507580 round_trippers.go:580]     Audit-Id: 89dc87ad-1282-4822-a25d-6aa38f568e18
	I0916 11:16:03.100254 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:03.100501 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:03.594408 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:03.594435 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:03.594445 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:03.594450 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:03.596831 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:03.596899 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:03.596922 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:03 GMT
	I0916 11:16:03.596943 1507580 round_trippers.go:580]     Audit-Id: b24a8101-180c-45c7-8233-eb59fcc8f131
	I0916 11:16:03.596948 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:03.596951 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:03.596953 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:03.596956 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:03.597153 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:03.597738 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:03.597755 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:03.597764 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:03.597770 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:03.599692 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:03.599713 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:03.599719 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:03.599724 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:03 GMT
	I0916 11:16:03.599729 1507580 round_trippers.go:580]     Audit-Id: c232608a-1139-4d8c-9ddc-3ff59eb36491
	I0916 11:16:03.599732 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:03.599735 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:03.599738 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:03.600327 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:04.095133 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:04.095160 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:04.095170 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:04.095176 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:04.097680 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:04.097701 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:04.097710 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:04.097714 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:04.097718 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:04.097720 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:04.097723 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:04 GMT
	I0916 11:16:04.097726 1507580 round_trippers.go:580]     Audit-Id: 7031c3c4-efa6-4b5b-804c-b892b4cd6514
	I0916 11:16:04.097847 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:04.098420 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:04.098429 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:04.098438 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:04.098441 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:04.100574 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:04.100598 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:04.100607 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:04 GMT
	I0916 11:16:04.100612 1507580 round_trippers.go:580]     Audit-Id: 25ad611a-6494-4bfc-8595-5074014b9e89
	I0916 11:16:04.100616 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:04.100619 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:04.100622 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:04.100624 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:04.100919 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:04.595111 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:04.595135 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:04.595145 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:04.595150 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:04.597626 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:04.597658 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:04.597668 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:04.597673 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:04 GMT
	I0916 11:16:04.597677 1507580 round_trippers.go:580]     Audit-Id: b8655f5f-1f02-4d40-9079-b2b5ddcf9a74
	I0916 11:16:04.597680 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:04.597685 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:04.597688 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:04.597884 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:04.598494 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:04.598513 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:04.598522 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:04.598527 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:04.600448 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:04.600474 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:04.600482 1507580 round_trippers.go:580]     Audit-Id: 20f6e020-a898-4c50-8de0-95450bcb57d1
	I0916 11:16:04.600486 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:04.600489 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:04.600493 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:04.600496 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:04.600499 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:04 GMT
	I0916 11:16:04.600596 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:05.094721 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:05.094748 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:05.094759 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:05.094763 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:05.097282 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:05.097308 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:05.097317 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:05 GMT
	I0916 11:16:05.097322 1507580 round_trippers.go:580]     Audit-Id: f17e3988-3a27-44b3-bf2e-102f6c3c7bad
	I0916 11:16:05.097325 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:05.097328 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:05.097331 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:05.097335 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:05.097787 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:05.098419 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:05.098437 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:05.098447 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:05.098452 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:05.100708 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:05.100731 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:05.100739 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:05.100745 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:05.100749 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:05 GMT
	I0916 11:16:05.100752 1507580 round_trippers.go:580]     Audit-Id: 0b010660-3954-4f42-b354-55d7480bb304
	I0916 11:16:05.100780 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:05.100792 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:05.101043 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:05.101491 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
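
A note on the trace format itself: the round_trippers.go:463/469/574/577/580 lines are produced by client-go's debugging round tripper, which minikube's high klog verbosity turns on, and the request.go:1351 body dumps appear at higher verbosity still (treat the exact -v thresholds as an assumption). A rough sketch of enabling the same tracing on an arbitrary client, assuming the exported NewDebuggingRoundTripper in recent client-go:

```go
package main

import (
	"net/http"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/transport"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	// Wrap the client transport so every request logs its URL, request
	// headers, and response status/headers via klog — the same shape as
	// the round_trippers.go lines throughout this log.
	cfg.WrapTransport = func(rt http.RoundTripper) http.RoundTripper {
		return transport.NewDebuggingRoundTripper(rt,
			transport.DebugJustURL,
			transport.DebugRequestHeaders,
			transport.DebugResponseStatus,
			transport.DebugResponseHeaders,
		)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	_ = client // use the client as usual; its requests are now traced
}
```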
	I0916 11:16:05.595255 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:05.595285 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:05.595295 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:05.595299 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:05.598103 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:05.598197 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:05.598234 1507580 round_trippers.go:580]     Audit-Id: bd2019d1-4a5a-44f0-b75c-cce1bad88c33
	I0916 11:16:05.598268 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:05.598290 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:05.598350 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:05.598370 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:05.598375 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:05 GMT
	I0916 11:16:05.598516 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:05.599193 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:05.599211 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:05.599227 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:05.599237 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:05.601552 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:05.601577 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:05.601586 1507580 round_trippers.go:580]     Audit-Id: 39c1bbec-7081-406a-a0c9-05329c01d874
	I0916 11:16:05.601590 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:05.601592 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:05.601595 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:05.601598 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:05.601602 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:05 GMT
	I0916 11:16:05.601907 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:06.095123 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:06.095148 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:06.095160 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:06.095164 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:06.097847 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:06.097921 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:06.097943 1507580 round_trippers.go:580]     Audit-Id: 61fff549-e05c-4669-ae05-4b77a2772ba8
	I0916 11:16:06.097953 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:06.097957 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:06.097959 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:06.097962 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:06.097965 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:06 GMT
	I0916 11:16:06.098180 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:06.098860 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:06.098881 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:06.098889 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:06.098893 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:06.101079 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:06.101152 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:06.101211 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:06.101242 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:06.101252 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:06.101256 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:06.101259 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:06 GMT
	I0916 11:16:06.101261 1507580 round_trippers.go:580]     Audit-Id: 2b89abad-8249-4ffb-9f67-90cf9e476338
	I0916 11:16:06.101463 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:06.594908 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:06.594934 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:06.594944 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:06.594948 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:06.597394 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:06.597454 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:06.597463 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:06.597469 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:06 GMT
	I0916 11:16:06.597473 1507580 round_trippers.go:580]     Audit-Id: 5bb211e8-2e57-4a4e-8d71-355b3e35c47e
	I0916 11:16:06.597476 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:06.597479 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:06.597482 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:06.597662 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:06.598260 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:06.598279 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:06.598287 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:06.598293 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:06.600324 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:06.600342 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:06.600351 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:06.600356 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:06.600360 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:06.600363 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:06.600365 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:06 GMT
	I0916 11:16:06.600372 1507580 round_trippers.go:580]     Audit-Id: 2f37138b-27d1-454c-9061-f796646862de
	I0916 11:16:06.600764 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:07.094940 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:07.094963 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:07.094972 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:07.094977 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:07.097784 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:07.097858 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:07.097880 1507580 round_trippers.go:580]     Audit-Id: f4fcfbc1-5092-4ff0-8f30-e8975c47af9c
	I0916 11:16:07.097900 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:07.097933 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:07.097957 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:07.097975 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:07.097994 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:07 GMT
	I0916 11:16:07.098182 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:07.098789 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:07.098809 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:07.098818 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:07.098825 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:07.101257 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:07.101284 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:07.101293 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:07.101296 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:07.101299 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:07.101302 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:07 GMT
	I0916 11:16:07.101305 1507580 round_trippers.go:580]     Audit-Id: de524e5c-b4a7-4873-9c6f-97781aba9323
	I0916 11:16:07.101308 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:07.101618 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:07.102030 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:07.595288 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:07.595312 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:07.595322 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:07.595327 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:07.597743 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:07.597859 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:07.597876 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:07.597895 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:07.597902 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:07.597906 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:07.597909 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:07 GMT
	I0916 11:16:07.597914 1507580 round_trippers.go:580]     Audit-Id: 4c438d8d-260f-4462-8a1e-f789f6bbc979
	I0916 11:16:07.598055 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:07.598644 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:07.598662 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:07.598671 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:07.598676 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:07.600634 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:07.600657 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:07.600666 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:07.600670 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:07.600701 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:07.600705 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:07.600709 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:07 GMT
	I0916 11:16:07.600712 1507580 round_trippers.go:580]     Audit-Id: d2b25e26-cb05-43d5-9721-9ffd1fcc7531
	I0916 11:16:07.601025 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:08.095265 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:08.095292 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:08.095302 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:08.095306 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:08.097755 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:08.097781 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:08.097790 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:08.097795 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:08.097798 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:08.097803 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:08 GMT
	I0916 11:16:08.097806 1507580 round_trippers.go:580]     Audit-Id: 8668d6c5-d5f1-4404-8c7b-89eb6c8966c1
	I0916 11:16:08.097809 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:08.098170 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:08.098780 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:08.098800 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:08.098809 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:08.098815 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:08.100990 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:08.101015 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:08.101023 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:08.101027 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:08.101030 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:08.101033 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:08.101036 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:08 GMT
	I0916 11:16:08.101039 1507580 round_trippers.go:580]     Audit-Id: 195b7a97-bf5a-425e-ae6a-154168d3c489
	I0916 11:16:08.101366 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:08.595163 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:08.595188 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:08.595198 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:08.595203 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:08.597876 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:08.597899 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:08.597907 1507580 round_trippers.go:580]     Audit-Id: 358dd3db-aa21-4857-a1b7-b962fe18ac29
	I0916 11:16:08.597912 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:08.597918 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:08.597921 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:08.597924 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:08.597927 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:08 GMT
	I0916 11:16:08.598076 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:08.598650 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:08.598660 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:08.598671 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:08.598676 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:08.600739 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:08.600760 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:08.600772 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:08 GMT
	I0916 11:16:08.600776 1507580 round_trippers.go:580]     Audit-Id: dd5baf27-8faa-4914-830c-3d4c9869ab59
	I0916 11:16:08.600779 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:08.600783 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:08.600785 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:08.600789 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:08.601250 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:09.094577 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:09.094601 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:09.094611 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:09.094618 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:09.100867 1507580 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:16:09.100906 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:09.100916 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:09.100945 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:09 GMT
	I0916 11:16:09.100958 1507580 round_trippers.go:580]     Audit-Id: c068c9b9-53de-4491-80d4-77acc93c752a
	I0916 11:16:09.100963 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:09.100966 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:09.100969 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:09.101104 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:09.101730 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:09.101746 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:09.101755 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:09.101759 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:09.104080 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:09.104104 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:09.104113 1507580 round_trippers.go:580]     Audit-Id: 3f0e56e4-3e5e-4f83-9bdf-bfaa9960b9b8
	I0916 11:16:09.104118 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:09.104121 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:09.104125 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:09.104128 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:09.104133 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:09 GMT
	I0916 11:16:09.105039 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:09.105474 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
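The pod_ready:103 entry above is the readiness poll driving this stretch of the log: roughly every 500 ms the test GETs the coredns pod and then the multinode-654612 node, and keeps waiting while the pod's Ready condition reports False. As a minimal illustrative sketch of that pattern (this is not minikube's actual pod_ready implementation; the kubeconfig path and the 500 ms cadence are assumptions read off this log), a client-go poll loop could look like:

```go
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podReady reports whether the pod's Ready condition is True.
func podReady(pod *corev1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			return c.Status == corev1.ConditionTrue
		}
	}
	return false
}

func main() {
	// Assumed kubeconfig location for the example.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	ctx := context.Background()
	for {
		// Same request the log shows:
		// GET /api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
		pod, err := client.CoreV1().Pods("kube-system").
			Get(ctx, "coredns-7c65d6cfc9-szvv9", metav1.GetOptions{})
		if err != nil {
			panic(err)
		}
		if podReady(pod) {
			fmt.Println("pod is Ready")
			return
		}
		fmt.Printf("pod %q has status Ready=False; retrying\n", pod.Name)
		time.Sleep(500 * time.Millisecond) // matches the ~500 ms cadence in this log
	}
}
```

In the log, each iteration also issues GET /api/v1/nodes/multinode-654612 alongside the pod fetch, presumably to confirm the node hosting the pod is itself Ready before the wait can succeed; the sketch above omits that second lookup for brevity.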
	I0916 11:16:09.595194 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:09.595219 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:09.595229 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:09.595234 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:09.597742 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:09.597775 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:09.597785 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:09.597792 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:09.597796 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:09.597800 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:09 GMT
	I0916 11:16:09.597805 1507580 round_trippers.go:580]     Audit-Id: 6aa7fc5f-7e77-4d09-90bb-809111f9549a
	I0916 11:16:09.597808 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:09.598110 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:09.598690 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:09.598708 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:09.598717 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:09.598723 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:09.601096 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:09.601163 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:09.601183 1507580 round_trippers.go:580]     Audit-Id: e79d1e67-86ca-43a8-8a6a-68e94a0379a5
	I0916 11:16:09.601202 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:09.601219 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:09.601244 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:09.601268 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:09.601287 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:09 GMT
	I0916 11:16:09.601424 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:10.095252 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:10.095290 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:10.095301 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:10.095306 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:10.098190 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:10.098218 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:10.098229 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:10.098235 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:10 GMT
	I0916 11:16:10.098238 1507580 round_trippers.go:580]     Audit-Id: 02062767-3d03-41a7-842b-a80f1f090ffa
	I0916 11:16:10.098241 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:10.098244 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:10.098246 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:10.098714 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:10.099467 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:10.099492 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:10.099504 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:10.099509 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:10.102613 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:10.102708 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:10.102721 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:10.102725 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:10 GMT
	I0916 11:16:10.102735 1507580 round_trippers.go:580]     Audit-Id: 20aa5989-fcd0-411e-84ca-1218b4ad11e5
	I0916 11:16:10.102742 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:10.102747 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:10.102749 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:10.103011 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:10.594389 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:10.594417 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:10.594427 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:10.594431 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:10.596993 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:10.597031 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:10.597041 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:10 GMT
	I0916 11:16:10.597046 1507580 round_trippers.go:580]     Audit-Id: 470a5f61-e742-4a03-a7ed-df3a96920787
	I0916 11:16:10.597053 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:10.597056 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:10.597059 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:10.597062 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:10.597743 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:10.598430 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:10.598456 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:10.598471 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:10.598477 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:10.601096 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:10.601121 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:10.601129 1507580 round_trippers.go:580]     Audit-Id: e5dc510a-6e31-4e79-a9c7-c5aeec0a5022
	I0916 11:16:10.601133 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:10.601136 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:10.601140 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:10.601142 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:10.601145 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:10 GMT
	I0916 11:16:10.601490 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:11.094359 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:11.094386 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:11.094396 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:11.094400 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:11.096956 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:11.096987 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:11.096996 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:11.097004 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:11.097009 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:11 GMT
	I0916 11:16:11.097011 1507580 round_trippers.go:580]     Audit-Id: c806788b-c97c-4e95-8b66-ed134c06ca29
	I0916 11:16:11.097014 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:11.097017 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:11.097305 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:11.097902 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:11.097922 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:11.097931 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:11.097935 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:11.100177 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:11.100248 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:11.100271 1507580 round_trippers.go:580]     Audit-Id: 5995c431-e64b-463f-84f2-cec1fefa4564
	I0916 11:16:11.100292 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:11.100325 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:11.100344 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:11.100360 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:11.100381 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:11 GMT
	I0916 11:16:11.100570 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:11.594832 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:11.594865 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:11.594875 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:11.594879 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:11.597360 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:11.597387 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:11.597396 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:11.597405 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:11.597416 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:11 GMT
	I0916 11:16:11.597420 1507580 round_trippers.go:580]     Audit-Id: bc61ad05-200f-4378-ab35-8a432d7b7b30
	I0916 11:16:11.597422 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:11.597458 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:11.597647 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:11.598312 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:11.598335 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:11.598345 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:11.598351 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:11.600714 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:11.600742 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:11.600751 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:11.600756 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:11 GMT
	I0916 11:16:11.600760 1507580 round_trippers.go:580]     Audit-Id: 0ae5e9f3-1e0c-4df8-a4d6-caef13da2640
	I0916 11:16:11.600763 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:11.600766 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:11.600769 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:11.601149 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:11.601573 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:12.094509 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:12.094547 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:12.094557 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:12.094561 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:12.097222 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:12.097310 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:12.097338 1507580 round_trippers.go:580]     Audit-Id: 33d980c5-ce8c-4e2a-98cb-feaffea930f7
	I0916 11:16:12.097380 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:12.097402 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:12.097418 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:12.097438 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:12.097469 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:12 GMT
	I0916 11:16:12.097662 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:12.098311 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:12.098330 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:12.098354 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:12.098390 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:12.100618 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:12.100638 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:12.100646 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:12.100650 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:12.100654 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:12 GMT
	I0916 11:16:12.100657 1507580 round_trippers.go:580]     Audit-Id: e82cbcbb-fb65-4c9e-8039-bf5acab32ace
	I0916 11:16:12.100659 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:12.100662 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:12.101052 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:12.594408 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:12.594430 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:12.594440 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:12.594446 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:12.596761 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:12.596785 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:12.596793 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:12.596797 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:12.596800 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:12.596804 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:12.596807 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:12 GMT
	I0916 11:16:12.596809 1507580 round_trippers.go:580]     Audit-Id: e2976648-ceea-4d6d-9775-27f0c7e1ea56
	I0916 11:16:12.597070 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:12.597689 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:12.597706 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:12.597715 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:12.597718 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:12.599532 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:12.599553 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:12.599561 1507580 round_trippers.go:580]     Audit-Id: 9fef9f32-89a2-49ac-b848-0366b060347e
	I0916 11:16:12.599564 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:12.599568 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:12.599570 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:12.599573 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:12.599576 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:12 GMT
	I0916 11:16:12.599683 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:13.095059 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:13.095089 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:13.095099 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:13.095104 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:13.097727 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:13.097759 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:13.097770 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:13 GMT
	I0916 11:16:13.097776 1507580 round_trippers.go:580]     Audit-Id: e6cb38b5-e1f2-4ade-a5a4-381fe0a00281
	I0916 11:16:13.097779 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:13.097785 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:13.097788 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:13.097791 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:13.098113 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:13.098709 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:13.098733 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:13.098743 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:13.098749 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:13.101447 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:13.101471 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:13.101480 1507580 round_trippers.go:580]     Audit-Id: dcd02f75-79c7-478e-aa97-cf2dcdf1f087
	I0916 11:16:13.101484 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:13.101490 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:13.101494 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:13.101497 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:13.101500 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:13 GMT
	I0916 11:16:13.101748 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:13.594972 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:13.594997 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:13.595008 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:13.595011 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:13.597477 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:13.597502 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:13.597511 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:13.597517 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:13.597521 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:13.597524 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:13.597530 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:13 GMT
	I0916 11:16:13.597536 1507580 round_trippers.go:580]     Audit-Id: 2eeffca5-833f-46f1-9df7-d947c62aff8a
	I0916 11:16:13.597854 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:13.598456 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:13.598475 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:13.598485 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:13.598490 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:13.600700 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:13.600722 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:13.600729 1507580 round_trippers.go:580]     Audit-Id: c8b42c09-7559-42f0-b1ac-40435bbe60b3
	I0916 11:16:13.600733 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:13.600736 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:13.600739 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:13.600742 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:13.600745 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:13 GMT
	I0916 11:16:13.600999 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:14.095234 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:14.095263 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:14.095272 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:14.095276 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:14.098092 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:14.098204 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:14.098223 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:14.098230 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:14.098233 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:14.098253 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:14 GMT
	I0916 11:16:14.098259 1507580 round_trippers.go:580]     Audit-Id: df5b2c43-d1e8-41f3-9a19-3824db1bdd5d
	I0916 11:16:14.098262 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:14.098419 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:14.098989 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:14.099007 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:14.099018 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:14.099022 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:14.101292 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:14.101313 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:14.101322 1507580 round_trippers.go:580]     Audit-Id: 1558d96f-cb4a-4142-abe5-6cd6c03cb844
	I0916 11:16:14.101326 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:14.101329 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:14.101331 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:14.101334 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:14.101337 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:14 GMT
	I0916 11:16:14.101455 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:14.101856 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:14.594632 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:14.594658 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:14.594668 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:14.594674 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:14.597201 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:14.597229 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:14.597238 1507580 round_trippers.go:580]     Audit-Id: ee5fc961-9d03-464c-a37f-beae02975f1e
	I0916 11:16:14.597244 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:14.597248 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:14.597255 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:14.597259 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:14.597261 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:14 GMT
	I0916 11:16:14.597370 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:14.597933 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:14.597951 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:14.597959 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:14.597963 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:14.600100 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:14.600132 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:14.600141 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:14.600146 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:14 GMT
	I0916 11:16:14.600149 1507580 round_trippers.go:580]     Audit-Id: 80b78dba-9fd3-4018-b078-8ca3392901ff
	I0916 11:16:14.600152 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:14.600155 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:14.600174 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:14.600482 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:15.094584 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:15.094669 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:15.094695 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:15.094718 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:15.097909 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:15.097999 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:15.098022 1507580 round_trippers.go:580]     Audit-Id: 86c7b1f4-30ef-42c7-a45b-06e309f7e1e2
	I0916 11:16:15.098044 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:15.098086 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:15.098117 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:15.098137 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:15.098181 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:15 GMT
	I0916 11:16:15.099008 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:15.099811 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:15.099866 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:15.099891 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:15.099913 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:15.102981 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:15.103065 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:15.103089 1507580 round_trippers.go:580]     Audit-Id: 08e9352d-2f8e-41fb-ae69-50b7471607eb
	I0916 11:16:15.103112 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:15.103152 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:15.103176 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:15.103196 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:15.103230 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:15 GMT
	I0916 11:16:15.103375 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:15.594545 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:15.594576 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:15.594588 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:15.594611 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:15.597627 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:15.597696 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:15.597719 1507580 round_trippers.go:580]     Audit-Id: 2d64c585-5d1f-4400-8395-69e182c66c63
	I0916 11:16:15.597740 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:15.597775 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:15.597796 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:15.597811 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:15.597829 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:15 GMT
	I0916 11:16:15.598078 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:15.598779 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:15.598803 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:15.598812 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:15.598817 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:15.601196 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:15.601264 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:15.601286 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:15.601305 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:15.601339 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:15.601361 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:15 GMT
	I0916 11:16:15.601378 1507580 round_trippers.go:580]     Audit-Id: 6673627b-fc01-46f3-a94d-74f56ab2271a
	I0916 11:16:15.601395 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:15.601549 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:16.095150 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:16.095174 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:16.095189 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:16.095195 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:16.097802 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:16.097830 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:16.097839 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:16.097843 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:16.097846 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:16.097851 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:16 GMT
	I0916 11:16:16.097855 1507580 round_trippers.go:580]     Audit-Id: ab3deb37-365e-4fa4-b55f-66e6362ee471
	I0916 11:16:16.097861 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:16.097992 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:16.098607 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:16.098627 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:16.098636 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:16.098642 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:16.100573 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:16.100596 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:16.100605 1507580 round_trippers.go:580]     Audit-Id: 81a48e3a-eae0-4ca7-8852-457415e81f98
	I0916 11:16:16.100610 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:16.100613 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:16.100617 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:16.100619 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:16.100622 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:16 GMT
	I0916 11:16:16.101039 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:16.594485 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:16.594519 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:16.594529 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:16.594533 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:16.597145 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:16.597174 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:16.597183 1507580 round_trippers.go:580]     Audit-Id: 546793f2-201e-4a2d-8c0a-551ca2734d86
	I0916 11:16:16.597187 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:16.597191 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:16.597193 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:16.597196 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:16.597200 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:16 GMT
	I0916 11:16:16.597522 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:16.598184 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:16.598205 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:16.598215 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:16.598219 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:16.600383 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:16.600442 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:16.600464 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:16 GMT
	I0916 11:16:16.600484 1507580 round_trippers.go:580]     Audit-Id: b91a604a-c761-46c4-90a4-7f740c637b30
	I0916 11:16:16.600518 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:16.600544 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:16.600562 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:16.600570 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:16.600734 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:16.601208 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:17.095206 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:17.095229 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:17.095239 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:17.095244 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:17.097639 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:17.097664 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:17.097674 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:17 GMT
	I0916 11:16:17.097680 1507580 round_trippers.go:580]     Audit-Id: 7af53aa4-ec45-4ad1-a5c5-ca6701a47e2d
	I0916 11:16:17.097683 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:17.097687 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:17.097690 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:17.097693 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:17.097924 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:17.098553 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:17.098572 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:17.098580 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:17.098585 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:17.100616 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:17.100637 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:17.100645 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:17.100650 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:17 GMT
	I0916 11:16:17.100653 1507580 round_trippers.go:580]     Audit-Id: a2eb5aef-8336-47a7-a8a2-86325c2bea7c
	I0916 11:16:17.100658 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:17.100661 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:17.100664 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:17.100819 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:17.595068 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:17.595094 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:17.595104 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:17.595110 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:17.597659 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:17.597686 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:17.597694 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:17 GMT
	I0916 11:16:17.597698 1507580 round_trippers.go:580]     Audit-Id: 75e54cb2-a847-4e4e-816a-6256c00583b2
	I0916 11:16:17.597701 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:17.597703 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:17.597706 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:17.597709 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:17.598143 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:17.598724 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:17.598737 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:17.598746 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:17.598752 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:17.600978 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:17.601010 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:17.601037 1507580 round_trippers.go:580]     Audit-Id: 7dce9866-8332-4527-bbf8-5e7ceb62817b
	I0916 11:16:17.601050 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:17.601054 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:17.601061 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:17.601064 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:17.601068 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:17 GMT
	I0916 11:16:17.601508 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:18.095276 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:18.095303 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:18.095313 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:18.095319 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:18.097790 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:18.097821 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:18.097831 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:18 GMT
	I0916 11:16:18.097835 1507580 round_trippers.go:580]     Audit-Id: ab4d9721-8498-4cd5-a2d6-3f2c03cc12fd
	I0916 11:16:18.097841 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:18.097844 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:18.097846 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:18.097849 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:18.098138 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:18.098770 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:18.098793 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:18.098802 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:18.098807 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:18.101010 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:18.101040 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:18.101050 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:18.101056 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:18 GMT
	I0916 11:16:18.101059 1507580 round_trippers.go:580]     Audit-Id: 34d5d75a-7ca8-4964-94cf-9e3495d94677
	I0916 11:16:18.101062 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:18.101065 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:18.101068 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:18.101174 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:18.594901 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:18.594925 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:18.594935 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:18.594941 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:18.597484 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:18.597521 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:18.597529 1507580 round_trippers.go:580]     Audit-Id: 4d9f2e29-2353-4c85-b60c-416932089026
	I0916 11:16:18.597533 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:18.597536 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:18.597539 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:18.597542 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:18.597544 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:18 GMT
	I0916 11:16:18.597694 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:18.598275 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:18.598291 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:18.598299 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:18.598304 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:18.600301 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:18.600368 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:18.600379 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:18.600384 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:18.600387 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:18 GMT
	I0916 11:16:18.600418 1507580 round_trippers.go:580]     Audit-Id: 33ca143c-e62e-4f66-a88c-f7374a2104b5
	I0916 11:16:18.600426 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:18.600429 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:18.600583 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.094484 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:19.094513 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.094523 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.094528 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.096965 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.096991 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.097000 1507580 round_trippers.go:580]     Audit-Id: e7667669-62e9-4f45-9c56-cdce7687a422
	I0916 11:16:19.097006 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.097009 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.097021 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.097024 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.097027 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.097135 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"991","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 7042 chars]
	I0916 11:16:19.097701 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:19.097719 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.097727 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.097732 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.099533 1507580 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:16:19.099552 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.099559 1507580 round_trippers.go:580]     Audit-Id: d17a7dac-ee6c-4b8f-b27c-3fce305c3ef1
	I0916 11:16:19.099563 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.099566 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.099569 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.099572 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.099575 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.099843 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.100275 1507580 pod_ready.go:103] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:19.594436 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-szvv9
	I0916 11:16:19.594466 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.594482 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.594488 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.597218 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.597245 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.597255 1507580 round_trippers.go:580]     Audit-Id: 0a49c843-c570-4749-91b6-213d7a4a47b6
	I0916 11:16:19.597259 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.597261 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.597264 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.597267 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.597270 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.597627 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-szvv9","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"26df8cd4-36bc-49e1-98bf-9c30f5555b7b","resourceVersion":"1139","creationTimestamp":"2024-09-16T11:10:15Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a88acc4a-b14b-4341-a616-6a6077ab8958","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:15Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a88acc4a-b14b-4341-a616-6a6077ab8958\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6814 chars]
	I0916 11:16:19.598222 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:19.598241 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.598251 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.598256 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.600615 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.600637 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.600645 1507580 round_trippers.go:580]     Audit-Id: 15f438f9-13d5-42d3-b209-2f449b7d15c0
	I0916 11:16:19.600649 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.600652 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.600654 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.600663 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.600665 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.600862 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVe
rsion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.601290 1507580 pod_ready.go:93] pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:19.601315 1507580 pod_ready.go:82] duration metric: took 21.507149558s for pod "coredns-7c65d6cfc9-szvv9" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.601328 1507580 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.601401 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-654612
	I0916 11:16:19.601413 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.601421 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.601428 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.603645 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.603674 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.603683 1507580 round_trippers.go:580]     Audit-Id: f71b39d5-8995-4d8e-a68d-a791f3797113
	I0916 11:16:19.603687 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.603691 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.603694 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.603697 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.603699 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.604074 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-654612","namespace":"kube-system","uid":"bb46feea-e4d5-411b-9ebc-e5984b1147a8","resourceVersion":"1071","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.67.2:2379","kubernetes.io/config.hash":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.mirror":"d0a18dbc2f101ac77b9a3f54b47797a2","kubernetes.io/config.seen":"2024-09-16T11:10:10.145147523Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config [truncated 6576 chars]
	I0916 11:16:19.604573 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:19.604592 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.604601 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.604606 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.606827 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.606853 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.606862 1507580 round_trippers.go:580]     Audit-Id: 15a39e59-a5f1-4c72-9f9b-67468a6d30e4
	I0916 11:16:19.606868 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.606873 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.606876 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.606879 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.606882 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.607192 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.607631 1507580 pod_ready.go:93] pod "etcd-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:19.607654 1507580 pod_ready.go:82] duration metric: took 6.309768ms for pod "etcd-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.607676 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.607746 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-654612
	I0916 11:16:19.607756 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.607764 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.607770 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.610042 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.610063 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.610070 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.610075 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.610078 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.610081 1507580 round_trippers.go:580]     Audit-Id: d692adf1-eac4-4ee1-b71d-3e797c4c7ddb
	I0916 11:16:19.610084 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.610087 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.610297 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-654612","namespace":"kube-system","uid":"8a56377d-b2a9-46dc-90b0-6d8f0aadec52","resourceVersion":"1069","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.67.2:8443","kubernetes.io/config.hash":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.mirror":"f3fdb95ee92c3c630b459a996a1fc6f9","kubernetes.io/config.seen":"2024-09-16T11:10:10.145153931Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes. [truncated 9108 chars]
	I0916 11:16:19.610876 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:19.610889 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.610897 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.610903 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.613027 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.613047 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.613055 1507580 round_trippers.go:580]     Audit-Id: b36a6e01-b119-4eff-9cf0-0e0ec8cbd82b
	I0916 11:16:19.613059 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.613062 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.613065 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.613067 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.613070 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.613318 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.613787 1507580 pod_ready.go:93] pod "kube-apiserver-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:19.613810 1507580 pod_ready.go:82] duration metric: took 6.124419ms for pod "kube-apiserver-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.613823 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.613888 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-654612
	I0916 11:16:19.613899 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.613908 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.613912 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.616166 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.616192 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.616202 1507580 round_trippers.go:580]     Audit-Id: bcee8c83-b371-4379-9c97-44b49988c879
	I0916 11:16:19.616206 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.616209 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.616212 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.616215 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.616218 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.616545 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-654612","namespace":"kube-system","uid":"08e87c01-201e-4373-bbd7-0a8a7a724a84","resourceVersion":"1063","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.mirror":"c7028fbfd05aaf11bd1f32f252589714","kubernetes.io/config.seen":"2024-09-16T11:10:10.145155408Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".": [truncated 8898 chars]
	I0916 11:16:19.617145 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:19.617156 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.617167 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.617170 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.619224 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.619307 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.619330 1507580 round_trippers.go:580]     Audit-Id: 84c43152-f03d-4633-9e2a-5e64e9d30630
	I0916 11:16:19.619360 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.619381 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.619399 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.619419 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.619449 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.619569 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:19.619989 1507580 pod_ready.go:93] pod "kube-controller-manager-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:19.620033 1507580 pod_ready.go:82] duration metric: took 6.202366ms for pod "kube-controller-manager-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.620050 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:19.620117 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:19.620126 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.620134 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.620140 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.622267 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.622293 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.622302 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.622306 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.622310 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.622314 1507580 round_trippers.go:580]     Audit-Id: b8dc2dd2-40fc-4372-b03c-227c6281fcdf
	I0916 11:16:19.622318 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.622321 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.622438 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:19.622958 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:19.622975 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:19.622983 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:19.622988 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:19.625042 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:19.625061 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:19.625069 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:19.625072 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:19.625078 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:19.625081 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:19 GMT
	I0916 11:16:19.625084 1507580 round_trippers.go:580]     Audit-Id: 983a55df-449b-4dfa-8c9c-51924ffad0cd
	I0916 11:16:19.625087 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:19.625187 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:20.121279 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:20.121306 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:20.121321 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:20.121326 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:20.123841 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:20.123865 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:20.123874 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:20.123879 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:20.123882 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:20.123885 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:20 GMT
	I0916 11:16:20.123888 1507580 round_trippers.go:580]     Audit-Id: 3105a4bc-28fb-4050-8f89-a10694abdd3a
	I0916 11:16:20.123891 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:20.123988 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:20.124575 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:20.124589 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:20.124598 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:20.124602 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:20.126800 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:20.126885 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:20.126916 1507580 round_trippers.go:580]     Audit-Id: 0dc4d334-bed5-42f1-b22d-c18f271a79b6
	I0916 11:16:20.126970 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:20.126998 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:20.127033 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:20.127070 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:20.127091 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:20 GMT
	I0916 11:16:20.127309 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:20.620555 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:20.620583 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:20.620592 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:20.620598 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:20.622956 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:20.622984 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:20.622993 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:20.622998 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:20.623003 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:20.623069 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:20.623080 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:20 GMT
	I0916 11:16:20.623084 1507580 round_trippers.go:580]     Audit-Id: 430d936e-4dc8-411c-93c6-f0f1dc3d3380
	I0916 11:16:20.623209 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:20.623754 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:20.623769 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:20.623778 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:20.623782 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:20.625847 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:20.625874 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:20.625882 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:20.625888 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:20 GMT
	I0916 11:16:20.625891 1507580 round_trippers.go:580]     Audit-Id: ad81730a-7529-4129-83aa-f9998657eba5
	I0916 11:16:20.625895 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:20.625898 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:20.625901 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:20.626013 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:21.121187 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:21.121221 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:21.121237 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:21.121245 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:21.123780 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:21.123805 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:21.123813 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:21.123818 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:21.123824 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:21 GMT
	I0916 11:16:21.123827 1507580 round_trippers.go:580]     Audit-Id: 690fd752-eb16-40d7-80f1-5f4b72da4429
	I0916 11:16:21.123830 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:21.123832 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:21.124332 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:21.124898 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:21.124917 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:21.124926 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:21.124932 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:21.127040 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:21.127059 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:21.127067 1507580 round_trippers.go:580]     Audit-Id: bce3f6f7-3e58-4da4-a565-6f7c13eeec80
	I0916 11:16:21.127072 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:21.127077 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:21.127081 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:21.127083 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:21.127087 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:21 GMT
	I0916 11:16:21.127202 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:21.620270 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:21.620297 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:21.620308 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:21.620313 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:21.622740 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:21.622769 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:21.622777 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:21.622781 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:21.622786 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:21.622789 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:21 GMT
	I0916 11:16:21.622793 1507580 round_trippers.go:580]     Audit-Id: f4b5e5e1-516b-49a3-8e26-143727697253
	I0916 11:16:21.622797 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:21.623132 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:21.623731 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:21.623750 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:21.623760 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:21.623766 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:21.626126 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:21.626155 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:21.626163 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:21.626169 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:21.626172 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:21.626177 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:21 GMT
	I0916 11:16:21.626180 1507580 round_trippers.go:580]     Audit-Id: 6dbe55f5-18d4-4fb1-bde1-98ab82a6d30d
	I0916 11:16:21.626182 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:21.626467 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:21.626939 1507580 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
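	The Ready:"False" result above explains the repeating request pairs around it: the same pod is re-fetched roughly every 500ms (see the timestamps), and each iteration also GETs the pod's node, multinode-654612-m02, to confirm it is still registered. A minimal sketch of that node-side check, under the same assumptions as the earlier snippet (client-go wiring; the helper name nodeReady is hypothetical):

	package readiness

	import (
		"context"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	// nodeReady fetches a node, as the interleaved node GETs in this log do,
	// and reports whether its Ready condition is currently True.
	func nodeReady(ctx context.Context, cs kubernetes.Interface, name string) (bool, error) {
		node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return false, err // includes NotFound if the node was deleted
		}
		for _, c := range node.Status.Conditions {
			if c.Type == corev1.NodeReady {
				return c.Status == corev1.ConditionTrue, nil
			}
		}
		return false, nil
	}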
	I0916 11:16:22.120750 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:22.120777 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:22.120787 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:22.120791 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:22.123918 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:22.123939 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:22.123948 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:22 GMT
	I0916 11:16:22.123951 1507580 round_trippers.go:580]     Audit-Id: c463712d-4c37-4ba0-b70c-1a3323d7242b
	I0916 11:16:22.123954 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:22.123957 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:22.123959 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:22.123962 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:22.124220 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:22.124777 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:22.124789 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:22.124797 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:22.124801 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:22.128523 1507580 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:16:22.128543 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:22.128551 1507580 round_trippers.go:580]     Audit-Id: c53919cf-b112-43a8-8130-d55a0ffd9719
	I0916 11:16:22.128555 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:22.128558 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:22.128562 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:22.128565 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:22.128568 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:22 GMT
	I0916 11:16:22.128735 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:22.620981 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:22.621006 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:22.621024 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:22.621028 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:22.623468 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:22.623493 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:22.623502 1507580 round_trippers.go:580]     Audit-Id: 70b89d1b-6ac9-40b6-9a83-9f5d56a10f9b
	I0916 11:16:22.623507 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:22.623512 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:22.623515 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:22.623517 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:22.623520 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:22 GMT
	I0916 11:16:22.623955 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:22.624522 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:22.624541 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:22.624551 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:22.624556 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:22.626791 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:22.626809 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:22.626816 1507580 round_trippers.go:580]     Audit-Id: 12cc0612-0b6e-4299-a08e-9eb53533a5ca
	I0916 11:16:22.626891 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:22.626915 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:22.626920 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:22.626926 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:22.626938 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:22 GMT
	I0916 11:16:22.627096 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:23.120765 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:23.120791 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:23.120801 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:23.120805 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:23.123178 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:23.123206 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:23.123215 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:23.123219 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:23.123222 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:23.123226 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:23 GMT
	I0916 11:16:23.123229 1507580 round_trippers.go:580]     Audit-Id: 750cab53-1f64-42d9-9d0e-9d678fde35d4
	I0916 11:16:23.123233 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:23.123420 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:23.123954 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:23.123969 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:23.123978 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:23.123991 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:23.126054 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:23.126072 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:23.126080 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:23 GMT
	I0916 11:16:23.126084 1507580 round_trippers.go:580]     Audit-Id: 226eb4c5-c604-4a28-ab83-0b4e779813e6
	I0916 11:16:23.126087 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:23.126090 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:23.126093 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:23.126096 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:23.126228 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:23.620297 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:23.620323 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:23.620347 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:23.620352 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:23.623066 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:23.623093 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:23.623102 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:23.623105 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:23.623110 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:23.623113 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:23.623116 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:23 GMT
	I0916 11:16:23.623119 1507580 round_trippers.go:580]     Audit-Id: c702a07a-76ae-41b9-bf8f-f2b0fac45239
	I0916 11:16:23.623240 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:23.623802 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:23.623827 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:23.623838 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:23.623846 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:23.626061 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:23.626088 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:23.626096 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:23 GMT
	I0916 11:16:23.626100 1507580 round_trippers.go:580]     Audit-Id: 4132075d-240a-4c64-8ad6-0ec858c25f69
	I0916 11:16:23.626103 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:23.626106 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:23.626109 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:23.626111 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:23.626233 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:24.120300 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:24.120349 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:24.120361 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:24.120366 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:24.124819 1507580 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:16:24.124844 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:24.124869 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:24.124877 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:24 GMT
	I0916 11:16:24.124888 1507580 round_trippers.go:580]     Audit-Id: a41cbe6b-9017-4181-871e-605971f57458
	I0916 11:16:24.124891 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:24.124893 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:24.124896 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:24.128016 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:24.128569 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:24.128591 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:24.128602 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:24.128609 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:24.131322 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:24.131344 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:24.131353 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:24.131356 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:24.131359 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:24.131362 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:24 GMT
	I0916 11:16:24.131365 1507580 round_trippers.go:580]     Audit-Id: 9e75576d-78ce-41e0-a8f2-34cc0cc318e2
	I0916 11:16:24.131367 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:24.131693 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:24.132124 1507580 pod_ready.go:103] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"False"
	I0916 11:16:24.620831 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:24.620855 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:24.620866 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:24.620871 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:24.623397 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:24.623418 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:24.623426 1507580 round_trippers.go:580]     Audit-Id: 44be896d-af58-41ea-b550-b846e822efd0
	I0916 11:16:24.623433 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:24.623437 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:24.623440 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:24.623444 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:24.623448 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:24 GMT
	I0916 11:16:24.624132 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:24.624759 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:24.624778 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:24.624788 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:24.624791 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:24.627122 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:24.627143 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:24.627151 1507580 round_trippers.go:580]     Audit-Id: 1aa53f42-51ea-481c-9c9e-38d70ea9e7f0
	I0916 11:16:24.627155 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:24.627159 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:24.627162 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:24.627165 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:24.627169 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:24 GMT
	I0916 11:16:24.627767 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:25.121131 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:25.121159 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.121170 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.121174 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.123672 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.123693 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.123702 1507580 round_trippers.go:580]     Audit-Id: d33c4a73-1bc9-40a8-a1fd-2b7bfbba4903
	I0916 11:16:25.123708 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.123713 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.123716 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.123720 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.123722 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.123935 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6520 chars]
	I0916 11:16:25.124505 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:25.124527 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.124536 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.124541 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.126700 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.126722 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.126736 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.126742 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.126745 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.126748 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.126752 1507580 round_trippers.go:580]     Audit-Id: 5d646801-feb6-418e-b3ca-afaac38227f7
	I0916 11:16:25.126754 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.127231 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:25.620962 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf2tw
	I0916 11:16:25.620991 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.621002 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.621008 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.623368 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.623389 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.623397 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.623400 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.623404 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.623407 1507580 round_trippers.go:580]     Audit-Id: e28fe009-54a1-46f8-b9f3-9e20bbf6f174
	I0916 11:16:25.623410 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.623412 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.623550 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-gf2tw","generateName":"kube-proxy-","namespace":"kube-system","uid":"814e8a89-b190-4aef-a303-44981c9e19c9","resourceVersion":"1166","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6179 chars]
	I0916 11:16:25.624066 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612-m02
	I0916 11:16:25.624076 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.624084 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.624089 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.626384 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.626410 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.626418 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.626422 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.626425 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.626428 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.626431 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.626433 1507580 round_trippers.go:580]     Audit-Id: 4913c0bd-bd12-48ea-84f0-b4c5ea14d2a3
	I0916 11:16:25.626634 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612-m02","uid":"aa1e2e1b-4957-47bd-bcf9-786ad66f873a","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:11:13Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_11_14_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:11:13Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}},{"mana [truncated 6342 chars]
	I0916 11:16:25.627043 1507580 pod_ready.go:93] pod "kube-proxy-gf2tw" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:25.627062 1507580 pod_ready.go:82] duration metric: took 6.00700438s for pod "kube-proxy-gf2tw" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.627074 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.627152 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-t9pzq
	I0916 11:16:25.627161 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.627169 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.627175 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.629411 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.629434 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.629443 1507580 round_trippers.go:580]     Audit-Id: 8a19ab8b-d192-4697-9c76-a4362619ea80
	I0916 11:16:25.629447 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.629450 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.629453 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.629456 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.629460 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.629835 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-t9pzq","generateName":"kube-proxy-","namespace":"kube-system","uid":"d5dac41c-8386-4ad5-a463-1730169d8062","resourceVersion":"994","creationTimestamp":"2024-09-16T11:10:14Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:14Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"8b5954ba-7bb1-4f7b-8014-fdfa99b2ac77\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6170 chars]
	I0916 11:16:25.630366 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:25.630382 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.630391 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.630398 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.632595 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.632616 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.632626 1507580 round_trippers.go:580]     Audit-Id: 02d993ac-7c5a-4917-9775-9954f4b180b4
	I0916 11:16:25.632631 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.632635 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.632638 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.632641 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.632653 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.633201 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:25.633615 1507580 pod_ready.go:93] pod "kube-proxy-t9pzq" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:25.633635 1507580 pod_ready.go:82] duration metric: took 6.549312ms for pod "kube-proxy-t9pzq" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.633648 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.633720 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vf648
	I0916 11:16:25.633730 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.633739 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.633748 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.635786 1507580 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:16:25.635850 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.635866 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.635874 1507580 round_trippers.go:580]     Audit-Id: edaf3b73-9440-40c1-ac49-0d22aa723b6a
	I0916 11:16:25.635880 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.635883 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.635886 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.635889 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.635891 1507580 round_trippers.go:580]     Content-Length: 200
	I0916 11:16:25.636101 1507580 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"pods \"kube-proxy-vf648\" not found","reason":"NotFound","details":{"name":"kube-proxy-vf648","kind":"pods"},"code":404}
	I0916 11:16:25.636208 1507580 pod_ready.go:98] error getting pod "kube-proxy-vf648" in "kube-system" namespace (skipping!): pods "kube-proxy-vf648" not found
	I0916 11:16:25.636227 1507580 pod_ready.go:82] duration metric: took 2.56865ms for pod "kube-proxy-vf648" in "kube-system" namespace to be "Ready" ...
	E0916 11:16:25.636243 1507580 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-proxy-vf648" in "kube-system" namespace (skipping!): pods "kube-proxy-vf648" not found
	I0916 11:16:25.636254 1507580 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.636368 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-654612
	I0916 11:16:25.636379 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.636387 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.636391 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.638552 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.638575 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.638583 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.638588 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.638592 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.638596 1507580 round_trippers.go:580]     Audit-Id: 0bd4f30c-61a3-431b-b699-aece5220da24
	I0916 11:16:25.638598 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.638608 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.638818 1507580 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-654612","namespace":"kube-system","uid":"fd553108-8193-4f33-8190-d4ec25a66de1","resourceVersion":"1072","creationTimestamp":"2024-09-16T11:10:10Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.mirror":"281b64f61502642475e3dbc1b139b188","kubernetes.io/config.seen":"2024-09-16T11:10:10.145156597Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:10Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{ [truncated 5102 chars]
	I0916 11:16:25.639325 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes/multinode-654612
	I0916 11:16:25.639341 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.639350 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.639356 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.641411 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.641469 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.641501 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.641520 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.641538 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.641557 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.641587 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.641605 1507580 round_trippers.go:580]     Audit-Id: b35178f2-8c77-4b3f-b2b3-cc2c791778a1
	I0916 11:16:25.641761 1507580 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:10:07Z","fieldsType":"FieldsV1","fiel [truncated 6346 chars]
	I0916 11:16:25.642195 1507580 pod_ready.go:93] pod "kube-scheduler-multinode-654612" in "kube-system" namespace has status "Ready":"True"
	I0916 11:16:25.642215 1507580 pod_ready.go:82] duration metric: took 5.952904ms for pod "kube-scheduler-multinode-654612" in "kube-system" namespace to be "Ready" ...
	I0916 11:16:25.642228 1507580 pod_ready.go:39] duration metric: took 27.556550777s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
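Editor's note: the wait that just completed is a plain poll. The trace shows minikube GETting each pod and its node roughly every 500ms (see the timestamps above) until the pod's Ready condition is True or the 6m0s budget expires. A minimal client-go sketch of that pattern, assuming a default kubeconfig; the names are illustrative, and this is not minikube's actual pod_ready.go:

    // readypoll.go: minimal sketch of the Ready-condition poll traced above.
    package main

    import (
        "context"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // podReady reports whether the pod's Ready condition is True.
    func podReady(pod *corev1.Pod) bool {
        for _, c := range pod.Status.Conditions {
            if c.Type == corev1.PodReady {
                return c.Status == corev1.ConditionTrue
            }
        }
        return false
    }

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        deadline := time.Now().Add(6 * time.Minute) // mirrors the 6m0s budget in the log
        for time.Now().Before(deadline) {
            pod, err := cs.CoreV1().Pods("kube-system").Get(context.TODO(), "kube-proxy-gf2tw", metav1.GetOptions{})
            if err == nil && podReady(pod) {
                fmt.Println("pod is Ready")
                return
            }
            time.Sleep(500 * time.Millisecond) // ~500ms between GETs, as in the trace
        }
        fmt.Println("timed out waiting for Ready")
    }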
	I0916 11:16:25.642248 1507580 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:16:25.642308 1507580 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:16:25.653937 1507580 system_svc.go:56] duration metric: took 11.680442ms WaitForService to wait for kubelet
	I0916 11:16:25.653966 1507580 kubeadm.go:582] duration metric: took 27.700947196s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:16:25.653985 1507580 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:16:25.654059 1507580 round_trippers.go:463] GET https://192.168.67.2:8443/api/v1/nodes
	I0916 11:16:25.654069 1507580 round_trippers.go:469] Request Headers:
	I0916 11:16:25.654078 1507580 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:16:25.654093 1507580 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:16:25.656724 1507580 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:16:25.656753 1507580 round_trippers.go:577] Response Headers:
	I0916 11:16:25.656775 1507580 round_trippers.go:580]     Audit-Id: 27c46758-d002-4ff3-989f-af3d6a82fa5c
	I0916 11:16:25.656779 1507580 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:16:25.656782 1507580 round_trippers.go:580]     Content-Type: application/json
	I0916 11:16:25.656786 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 398d33dc-9930-49e5-b54c-c7f69bec5bba
	I0916 11:16:25.656789 1507580 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 74b7aedf-058e-41ea-8730-db2df59a227b
	I0916 11:16:25.656792 1507580 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:16:25 GMT
	I0916 11:16:25.657229 1507580 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1167"},"items":[{"metadata":{"name":"multinode-654612","uid":"17ee1588-6ece-4a45-8e72-a05ec6d1f806","resourceVersion":"982","creationTimestamp":"2024-09-16T11:10:07Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-654612","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-654612","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_10_11_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/crio/crio.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time": [truncated 13734 chars]
	I0916 11:16:25.657942 1507580 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:16:25.657969 1507580 node_conditions.go:123] node cpu capacity is 2
	I0916 11:16:25.657979 1507580 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:16:25.657983 1507580 node_conditions.go:123] node cpu capacity is 2
	I0916 11:16:25.657989 1507580 node_conditions.go:105] duration metric: took 3.998671ms to run NodePressure ...
	I0916 11:16:25.658001 1507580 start.go:241] waiting for startup goroutines ...
	I0916 11:16:25.658029 1507580 start.go:255] writing updated cluster config ...
	I0916 11:16:25.658342 1507580 ssh_runner.go:195] Run: rm -f paused
	I0916 11:16:25.666476 1507580 out.go:177] * Done! kubectl is now configured to use "multinode-654612" cluster and "default" namespace by default
	E0916 11:16:25.668831 1507580 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
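Editor's note: the trailing E-line deserves attention: fork/exec of /usr/local/bin/kubectl fails with "exec format error", which is the kernel refusing to run a binary whose format does not match the host, most commonly a cross-architecture binary (for example, an amd64 kubectl on this arm64 machine). That reading is an inference from the log line, not something the log states outright; file(1) on the binary, or inspecting its ELF header, would confirm it. A small, self-contained Go sketch of the latter (the path is taken from the log line above):

    // archcheck.go: report the ELF machine type of a binary to confirm an
    // architecture mismatch like the one suggested by "exec format error".
    package main

    import (
        "debug/elf"
        "fmt"
        "os"
    )

    func main() {
        path := "/usr/local/bin/kubectl" // assumption: path from the log line
        if len(os.Args) > 1 {
            path = os.Args[1]
        }
        f, err := elf.Open(path)
        if err != nil {
            fmt.Fprintln(os.Stderr, "not a readable ELF binary:", err)
            os.Exit(1)
        }
        defer f.Close()
        // On this arm64 host, anything other than EM_AARCH64 would explain
        // fork/exec failing with "exec format error".
        fmt.Printf("%s: machine=%v class=%v\n", path, f.Machine, f.Class)
    }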
	
	
	==> CRI-O <==
	Sep 16 11:15:38 multinode-654612 crio[631]: time="2024-09-16 11:15:38.301565071Z" level=info msg="Started container" PID=1179 containerID=4409ebf189fd0948fa7a1e7e96cf866e1f53e4107415900dd922885f124ae5fc description=default/busybox-7dff88458-rdtjw/busybox id=77669418-9492-4029-802d-a28f1861d3df name=/runtime.v1.RuntimeService/StartContainer sandboxID=cfb8428cb3cb2bcd7423b781cd99059a6920d43badf6ab5adb6746559d74d801
	Sep 16 11:16:08 multinode-654612 conmon[1152]: conmon ff8c58b5c6ed8aad0043 <ninfo>: container 1174 exited with status 1
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.650556625Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=8e0636b6-61f1-4a6d-859a-359df3aff978 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.650798482Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=8e0636b6-61f1-4a6d-859a-359df3aff978 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.652472360Z" level=info msg="Checking image status: gcr.io/k8s-minikube/storage-provisioner:v5" id=e0439085-caab-4cc7-9721-896c013ea34c name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.652981911Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6,RepoTags:[gcr.io/k8s-minikube/storage-provisioner:v5],RepoDigests:[gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2 gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944],Size_:29037500,Uid:nil,Username:,Spec:nil,},Info:map[string]string{},}" id=e0439085-caab-4cc7-9721-896c013ea34c name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.654125029Z" level=info msg="Creating container: kube-system/storage-provisioner/storage-provisioner" id=ad97899a-bf73-47b4-b0f3-0ade3643f00f name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.654227665Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.673296988Z" level=warning msg="Failed to open /etc/passwd: open /var/lib/containers/storage/overlay/56db943c9de3821735d39bc5d6bbb8669f081592d9762c9fd65a16f97f85d8a7/merged/etc/passwd: no such file or directory"
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.673344248Z" level=warning msg="Failed to open /etc/group: open /var/lib/containers/storage/overlay/56db943c9de3821735d39bc5d6bbb8669f081592d9762c9fd65a16f97f85d8a7/merged/etc/group: no such file or directory"
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.720817868Z" level=info msg="Created container db28444949a7a1aaaabfe88643d0f68aff502c2b903376cf3d5b70dad4bd448b: kube-system/storage-provisioner/storage-provisioner" id=ad97899a-bf73-47b4-b0f3-0ade3643f00f name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.721845018Z" level=info msg="Starting container: db28444949a7a1aaaabfe88643d0f68aff502c2b903376cf3d5b70dad4bd448b" id=c3b39e61-6383-4a43-8966-6bd374ba5282 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:16:08 multinode-654612 crio[631]: time="2024-09-16 11:16:08.734166774Z" level=info msg="Started container" PID=1505 containerID=db28444949a7a1aaaabfe88643d0f68aff502c2b903376cf3d5b70dad4bd448b description=kube-system/storage-provisioner/storage-provisioner id=c3b39e61-6383-4a43-8966-6bd374ba5282 name=/runtime.v1.RuntimeService/StartContainer sandboxID=c0a3a78706d8e16ca00dd3cd436f81022332dc5a8bf4e47120ee0e1403b93df2
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.547922196Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": CREATE"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.551973526Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.552025127Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.552042685Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": WRITE"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.555419111Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.555454031Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.555470531Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist.temp\": RENAME"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.558640110Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.558677525Z" level=info msg="Updated default CNI network name to kindnet"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.558694476Z" level=info msg="CNI monitoring event \"/etc/cni/net.d/10-kindnet.conflist\": CREATE"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.561955844Z" level=info msg="Found CNI network kindnet (type=ptp) at /etc/cni/net.d/10-kindnet.conflist"
	Sep 16 11:16:18 multinode-654612 crio[631]: time="2024-09-16 11:16:18.561989533Z" level=info msg="Updated default CNI network name to kindnet"
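Editor's note: the CNI lines above show CRI-O's config watcher in action: kindnet writes 10-kindnet.conflist.temp, then renames it into place, and each CREATE/WRITE/RENAME event under /etc/cni/net.d triggers a re-read and an "Updated default CNI network name" log. A minimal sketch of that watch pattern using the fsnotify library; this illustrates the mechanism only and is not CRI-O's actual implementation:

    // cniwatch.go: minimal fsnotify sketch of watching a CNI config directory,
    // mirroring the CREATE/WRITE/RENAME events in the CRI-O log above.
    package main

    import (
        "log"
        "strings"

        "github.com/fsnotify/fsnotify"
    )

    func main() {
        w, err := fsnotify.NewWatcher()
        if err != nil {
            log.Fatal(err)
        }
        defer w.Close()
        if err := w.Add("/etc/cni/net.d"); err != nil {
            log.Fatal(err)
        }
        for {
            select {
            case ev, ok := <-w.Events:
                if !ok {
                    return
                }
                log.Printf("CNI monitoring event %q: %s", ev.Name, ev.Op)
                if strings.HasSuffix(ev.Name, ".conflist") {
                    // a real watcher would re-parse the conflist here and
                    // update the default network name, as CRI-O does above
                }
            case err := <-w.Errors:
                log.Println("watch error:", err)
            }
        }
    }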
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	db28444949a7a       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   18 seconds ago      Running             storage-provisioner       4                   c0a3a78706d8e       storage-provisioner
	4b6753fd3e0bb       2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4   49 seconds ago      Running             coredns                   2                   c93693e41662f       coredns-7c65d6cfc9-szvv9
	ff8c58b5c6ed8       ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6   49 seconds ago      Exited              storage-provisioner       3                   c0a3a78706d8e       storage-provisioner
	4409ebf189fd0       89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd   49 seconds ago      Running             busybox                   2                   cfb8428cb3cb2       busybox-7dff88458-rdtjw
	fa3dcc5e63f86       24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d   49 seconds ago      Running             kube-proxy                2                   c8b4073e288ef       kube-proxy-t9pzq
	f9f52628ba051       6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51   49 seconds ago      Running             kindnet-cni               2                   bd3a61c2e8405       kindnet-whjqt
	6fadc3310a749       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   53 seconds ago      Running             kube-controller-manager   3                   7ec00c9ff5ae0       kube-controller-manager-multinode-654612
	6968d2a350b6f       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   54 seconds ago      Running             etcd                      2                   f2bc478079622       etcd-multinode-654612
	f38f3d56710e9       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   54 seconds ago      Running             kube-scheduler            2                   7932065704f60       kube-scheduler-multinode-654612
	994d9d024ab99       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   54 seconds ago      Running             kube-apiserver            2                   a58c8c24037f9       kube-apiserver-multinode-654612
	
	
	==> coredns [4b6753fd3e0bbd13ed7b7bd522490175dad31bcb7c845832daded1cfe068a120] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = bfa258e3dfcd8004ab6c7d60772766a595ee209e49c62e6ae56bd911a145318b327e0c73bbccac30667047dafea6a8c1149027cea85d58a2246677e8ec1caab2
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:47217 - 58795 "HINFO IN 5166087170056830609.5641503210447977862. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.019566082s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[159563475]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:15:38.264) (total time: 30002ms):
	Trace[159563475]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:16:08.266)
	Trace[159563475]: [30.002273107s] [30.002273107s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1160327272]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:15:38.265) (total time: 30003ms):
	Trace[1160327272]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:16:08.266)
	Trace[1160327272]: [30.003409284s] [30.003409284s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1011091284]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:15:38.265) (total time: 30003ms):
	Trace[1011091284]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:16:08.266)
	Trace[1011091284]: [30.003437821s] [30.003437821s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/ready: Still waiting on: "kubernetes"
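Editor's note: the three reflector errors above are one failure repeated: for about 30s after the restart (11:15:38 to 11:16:08), CoreDNS could not reach the kubernetes Service VIP at 10.96.0.1:443, so each initial List timed out and the plugin retried; the most likely cause is that kube-proxy and the CNI were still coming up, which fits the kube-proxy "Starting 48s" event below. A minimal in-cluster Go sketch that reproduces the failing request shape (limit=500, resourceVersion=0); it assumes it runs as a pod with RBAC to list namespaces:

    // apicheck.go: reproduce the List call CoreDNS's kubernetes plugin was
    // timing out on, to probe whether the Service VIP is reachable in-cluster.
    package main

    import (
        "context"
        "fmt"
        "time"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func main() {
        cfg, err := rest.InClusterConfig() // typically resolves to the Service VIP, 10.96.0.1:443 here
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
        defer cancel()
        // Same shape as the failing request in the log: limit=500, resourceVersion=0.
        _, err = cs.CoreV1().Namespaces().List(ctx, metav1.ListOptions{Limit: 500, ResourceVersion: "0"})
        if err != nil {
            fmt.Println("list failed (Service VIP unreachable?):", err)
            return
        }
        fmt.Println("kubernetes Service reachable")
    }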
	
	
	==> describe nodes <==
	Name:               multinode-654612
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_10_11_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:10:07 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:16:18 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:15:37 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:15:37 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:15:37 +0000   Mon, 16 Sep 2024 11:10:04 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:15:37 +0000   Mon, 16 Sep 2024 11:10:56 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.2
	  Hostname:    multinode-654612
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 5993924e5a4a42c9b7b048ca5154f788
	  System UUID:                b0403d6b-24c6-42eb-8273-193a1e97b1c8
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-rdtjw                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m59s
	  kube-system                 coredns-7c65d6cfc9-szvv9                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     6m12s
	  kube-system                 etcd-multinode-654612                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         6m17s
	  kube-system                 kindnet-whjqt                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      6m13s
	  kube-system                 kube-apiserver-multinode-654612             250m (12%)    0 (0%)      0 (0%)           0 (0%)         6m17s
	  kube-system                 kube-controller-manager-multinode-654612    200m (10%)    0 (0%)      0 (0%)           0 (0%)         6m17s
	  kube-system                 kube-proxy-t9pzq                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m13s
	  kube-system                 kube-scheduler-multinode-654612             100m (5%)     0 (0%)      0 (0%)           0 (0%)         6m17s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m12s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 48s                    kube-proxy       
	  Normal   Starting                 3m13s                  kube-proxy       
	  Normal   Starting                 6m10s                  kube-proxy       
	  Normal   Starting                 6m17s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 6m17s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  6m17s                  kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    6m17s                  kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     6m17s                  kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           6m13s                  node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	  Normal   CIDRAssignmentFailed     6m13s                  cidrAllocator    Node multinode-654612 status is now: CIDRAssignmentFailed
	  Normal   NodeReady                5m31s                  kubelet          Node multinode-654612 status is now: NodeReady
	  Normal   Starting                 3m21s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m21s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m21s (x8 over 3m21s)  kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m21s (x8 over 3m21s)  kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m21s (x7 over 3m21s)  kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           3m13s                  node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	  Normal   Starting                 55s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 55s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  55s (x8 over 55s)      kubelet          Node multinode-654612 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    55s (x8 over 55s)      kubelet          Node multinode-654612 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     55s (x7 over 55s)      kubelet          Node multinode-654612 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           46s                    node-controller  Node multinode-654612 event: Registered Node multinode-654612 in Controller
	
	
	Name:               multinode-654612-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-654612-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-654612
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_11_14_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:11:13 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-654612-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:16:25 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:16:04 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:16:04 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:16:04 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:16:04 +0000   Mon, 16 Sep 2024 11:14:08 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.3
	  Hostname:    multinode-654612-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 784e35d902e746379198fb9bfd2b530b
	  System UUID:                9e565e5c-62ec-45b7-a6b4-8e158afd85b2
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-sfkxt    0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m59s
	  kube-system                 kindnet-687xg              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m14s
	  kube-system                 kube-proxy-gf2tw           0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m14s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 5m12s                  kube-proxy       
	  Normal   Starting                 3s                     kube-proxy       
	  Normal   Starting                 2m1s                   kube-proxy       
	  Normal   NodeHasSufficientMemory  5m14s (x2 over 5m14s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    5m14s (x2 over 5m14s)  kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m14s (x2 over 5m14s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Warning  CgroupV1                 5m14s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           5m13s                  node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeReady                5m2s                   kubelet          Node multinode-654612-m02 status is now: NodeReady
	  Normal   RegisteredNode           3m13s                  node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Normal   NodeNotReady             2m33s                  node-controller  Node multinode-654612-m02 status is now: NodeNotReady
	  Normal   Starting                 2m32s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m32s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     2m25s (x7 over 2m32s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  2m19s (x8 over 2m32s)  kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m19s (x8 over 2m32s)  kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           46s                    node-controller  Node multinode-654612-m02 event: Registered Node multinode-654612-m02 in Controller
	  Warning  CgroupV1                 35s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 35s                    kubelet          Starting kubelet.
	  Normal   NodeHasSufficientPID     29s (x7 over 35s)      kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  23s (x8 over 35s)      kubelet          Node multinode-654612-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    23s (x8 over 35s)      kubelet          Node multinode-654612-m02 status is now: NodeHasNoDiskPressure
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [6968d2a350b6f344adb4a00c2854ab5395cfeb0053cc339fea1f5b490b33cd33] <==
	{"level":"info","ts":"2024-09-16T11:15:33.327780Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"9d8fdeb88b6def78","local-member-id":"8688e899f7831fc7","added-peer-id":"8688e899f7831fc7","added-peer-peer-urls":["https://192.168.67.2:2380"]}
	{"level":"info","ts":"2024-09-16T11:15:33.327894Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"9d8fdeb88b6def78","local-member-id":"8688e899f7831fc7","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:15:33.327936Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:15:33.329787Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:15:33.331355Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:15:33.331561Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"8688e899f7831fc7","initial-advertise-peer-urls":["https://192.168.67.2:2380"],"listen-peer-urls":["https://192.168.67.2:2380"],"advertise-client-urls":["https://192.168.67.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.67.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:15:33.331590Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:15:33.331729Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:15:33.331744Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.67.2:2380"}
	{"level":"info","ts":"2024-09-16T11:15:34.716719Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T11:15:34.716841Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T11:15:34.716898Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgPreVoteResp from 8688e899f7831fc7 at term 3"}
	{"level":"info","ts":"2024-09-16T11:15:34.716938Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T11:15:34.716975Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgVoteResp from 8688e899f7831fc7 at term 4"}
	{"level":"info","ts":"2024-09-16T11:15:34.717022Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became leader at term 4"}
	{"level":"info","ts":"2024-09-16T11:15:34.717057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 8688e899f7831fc7 elected leader 8688e899f7831fc7 at term 4"}
	{"level":"info","ts":"2024-09-16T11:15:34.720920Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"8688e899f7831fc7","local-member-attributes":"{Name:multinode-654612 ClientURLs:[https://192.168.67.2:2379]}","request-path":"/0/members/8688e899f7831fc7/attributes","cluster-id":"9d8fdeb88b6def78","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:15:34.721151Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:15:34.721452Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:15:34.721640Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:15:34.721683Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:15:34.722328Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:15:34.723240Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.67.2:2379"}
	{"level":"info","ts":"2024-09-16T11:15:34.732417Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:15:34.733355Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	
	
	==> kernel <==
	 11:16:27 up 10:58,  0 users,  load average: 0.93, 1.66, 2.28
	Linux multinode-654612 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [f9f52628ba05193780f6c82e9a2da0b44f2986f170c6b0d29ff5a83fe37848f4] <==
	Trace[740724766]: [30.001344156s] [30.001344156s] END
	E0916 11:16:08.543135       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 11:16:08.542878       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:16:08.543175       1 trace.go:236] Trace[163965576]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 11:15:38.542) (total time: 30000ms):
	Trace[163965576]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:16:08.542)
	Trace[163965576]: [30.000621966s] [30.000621966s] END
	E0916 11:16:08.543212       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 11:16:08.542917       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:16:08.543249       1 trace.go:236] Trace[1583965420]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 11:15:38.542) (total time: 30000ms):
	Trace[1583965420]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:16:08.542)
	Trace[1583965420]: [30.000961388s] [30.000961388s] END
	E0916 11:16:08.543282       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 11:16:08.542975       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:16:08.543306       1 trace.go:236] Trace[261070840]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 11:15:38.542) (total time: 30000ms):
	Trace[261070840]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:16:08.542)
	Trace[261070840]: [30.000587415s] [30.000587415s] END
	E0916 11:16:08.543317       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 11:16:09.943884       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 11:16:09.943912       1 metrics.go:61] Registering metrics
	I0916 11:16:09.943982       1 controller.go:374] Syncing nftables rules
	I0916 11:16:18.547629       1 main.go:295] Handling node with IPs: map[192.168.67.2:{}]
	I0916 11:16:18.547688       1 main.go:299] handling current node
	I0916 11:16:18.551242       1 main.go:295] Handling node with IPs: map[192.168.67.3:{}]
	I0916 11:16:18.551275       1 main.go:322] Node multinode-654612-m02 has CIDR [10.244.2.0/24] 
	I0916 11:16:18.551403       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.2.0/24 Src: <nil> Gw: 192.168.67.3 Flags: [] Table: 0} 
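	# After the 30s list timeouts clear (the apiserver came back at 11:15:37), kindnet syncs
	# its caches and installs the pod-CIDR route for m02 via the node IP. A sketch of how to
	# confirm the route landed on the primary node, assuming the standard minikube ssh
	# entrypoint (not part of the recorded run):
	#   out/minikube-linux-arm64 ssh -p multinode-654612 -- ip route show 10.244.2.0/24
	#   expected output: 10.244.2.0/24 via 192.168.67.3 (device name may vary)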
	
	
	==> kube-apiserver [994d9d024ab9948799a40f0cf766bfdead44ddf40276d6aeb55982fcc069ad25] <==
	I0916 11:15:37.330976       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 11:15:37.330983       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 11:15:37.426460       1 dynamic_cafile_content.go:160] "Starting controller" name="client-ca-bundle::/var/lib/minikube/certs/ca.crt"
	I0916 11:15:37.426578       1 dynamic_cafile_content.go:160] "Starting controller" name="request-header::/var/lib/minikube/certs/front-proxy-ca.crt"
	I0916 11:15:37.487341       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:15:37.496266       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:15:37.496291       1 policy_source.go:224] refreshing policies
	I0916 11:15:37.512144       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:15:37.531523       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:15:37.532216       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:15:37.536703       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:15:37.549318       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:15:37.550703       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:15:37.551062       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:15:37.558312       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:15:37.564792       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:15:37.566814       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 11:15:37.566965       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:15:37.567017       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:15:37.567048       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:15:37.567085       1 cache.go:39] Caches are synced for autoregister controller
	E0916 11:15:37.591261       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 11:15:38.331085       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:15:41.185250       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:15:41.533092       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-controller-manager [6fadc3310a74940a032beaf9e6e87e5fdae7bd990b12f3dd070a8e9ffc041ba8] <==
	I0916 11:15:41.199505       1 shared_informer.go:320] Caches are synced for deployment
	I0916 11:15:41.209261       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 11:15:41.226202       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 11:15:41.242360       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 11:15:41.320310       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 11:15:41.326055       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 11:15:41.376740       1 shared_informer.go:320] Caches are synced for PV protection
	I0916 11:15:41.376751       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 11:15:41.490413       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="312.947725ms"
	I0916 11:15:41.491217       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.3µs"
	I0916 11:15:41.820638       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:15:41.872608       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:15:41.872656       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 11:16:04.909101       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-654612-m02"
	I0916 11:16:09.129044       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="14.082738ms"
	I0916 11:16:09.129314       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="74.722µs"
	I0916 11:16:10.229914       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="58.197µs"
	I0916 11:16:19.190685       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="13.933515ms"
	I0916 11:16:19.190927       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="71.916µs"
	I0916 11:16:21.145389       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kube-proxy-vf648"
	I0916 11:16:21.168817       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kube-proxy-vf648"
	I0916 11:16:21.168854       1 gc_controller.go:342] "PodGC is force deleting Pod" logger="pod-garbage-collector-controller" pod="kube-system/kindnet-ncfhl"
	I0916 11:16:21.191243       1 gc_controller.go:258] "Forced deletion of orphaned Pod succeeded" logger="pod-garbage-collector-controller" pod="kube-system/kindnet-ncfhl"
	I0916 11:16:25.268907       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="6.851318ms"
	I0916 11:16:25.269065       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="38.719µs"
	
	
	==> kube-proxy [fa3dcc5e63f86fcf2cbd444eb60dbfc5475d002fc896dd81709b451392f557e0] <==
	I0916 11:15:38.235332       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:15:38.535920       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.67.2"]
	E0916 11:15:38.536029       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:15:38.827067       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:15:38.827137       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:15:38.829289       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:15:38.829609       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:15:38.829630       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:15:38.835062       1 config.go:199] "Starting service config controller"
	I0916 11:15:38.835090       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:15:38.835113       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:15:38.835121       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:15:38.835624       1 config.go:328] "Starting node config controller"
	I0916 11:15:38.835644       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:15:38.935512       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:15:38.935525       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:15:38.935876       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f38f3d56710e99426603ca6e20faaedfd260337339bd7efaec25385a0a7c8334] <==
	I0916 11:15:34.156889       1 serving.go:386] Generated self-signed cert in-memory
	W0916 11:15:37.397221       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 11:15:37.399502       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 11:15:37.399549       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 11:15:37.399580       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 11:15:37.487168       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:15:37.487207       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:15:37.494531       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:15:37.494649       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:15:37.525134       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:15:37.498165       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:15:37.626003       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:15:34 multinode-654612 kubelet[739]: I0916 11:15:34.055417     739 kubelet_node_status.go:72] "Attempting to register node" node="multinode-654612"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.488726     739 apiserver.go:52] "Watching apiserver"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.598361     739 kubelet_node_status.go:111] "Node was previously registered" node="multinode-654612"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.598473     739 kubelet_node_status.go:75] "Successfully registered node" node="multinode-654612"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.598502     739 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.599195     739 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.602216     739 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660570     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/d5dac41c-8386-4ad5-a463-1730169d8062-xtables-lock\") pod \"kube-proxy-t9pzq\" (UID: \"d5dac41c-8386-4ad5-a463-1730169d8062\") " pod="kube-system/kube-proxy-t9pzq"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660699     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/2b21455e-8cb4-4c70-937b-6ff3cd85b42f-tmp\") pod \"storage-provisioner\" (UID: \"2b21455e-8cb4-4c70-937b-6ff3cd85b42f\") " pod="kube-system/storage-provisioner"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660740     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/0ed90b6c-0a03-4af6-a0ab-ea90794fa963-cni-cfg\") pod \"kindnet-whjqt\" (UID: \"0ed90b6c-0a03-4af6-a0ab-ea90794fa963\") " pod="kube-system/kindnet-whjqt"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660766     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/0ed90b6c-0a03-4af6-a0ab-ea90794fa963-xtables-lock\") pod \"kindnet-whjqt\" (UID: \"0ed90b6c-0a03-4af6-a0ab-ea90794fa963\") " pod="kube-system/kindnet-whjqt"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660802     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0ed90b6c-0a03-4af6-a0ab-ea90794fa963-lib-modules\") pod \"kindnet-whjqt\" (UID: \"0ed90b6c-0a03-4af6-a0ab-ea90794fa963\") " pod="kube-system/kindnet-whjqt"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.660827     739 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d5dac41c-8386-4ad5-a463-1730169d8062-lib-modules\") pod \"kube-proxy-t9pzq\" (UID: \"d5dac41c-8386-4ad5-a463-1730169d8062\") " pod="kube-system/kube-proxy-t9pzq"
	Sep 16 11:15:37 multinode-654612 kubelet[739]: I0916 11:15:37.712618     739 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:15:42 multinode-654612 kubelet[739]: E0916 11:15:42.552895     739 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485342552603341,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:15:42 multinode-654612 kubelet[739]: E0916 11:15:42.552933     739 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485342552603341,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:15:52 multinode-654612 kubelet[739]: E0916 11:15:52.555255     739 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485352554767299,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:15:52 multinode-654612 kubelet[739]: E0916 11:15:52.555289     739 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485352554767299,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:02 multinode-654612 kubelet[739]: E0916 11:16:02.556649     739 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485362556359651,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:02 multinode-654612 kubelet[739]: E0916 11:16:02.556708     739 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485362556359651,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:08 multinode-654612 kubelet[739]: I0916 11:16:08.644832     739 scope.go:117] "RemoveContainer" containerID="ff8c58b5c6ed8aad00432f5561974b44083abc1cf02f287570db0a4f37eb5c55"
	Sep 16 11:16:12 multinode-654612 kubelet[739]: E0916 11:16:12.558927     739 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485372558513219,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:12 multinode-654612 kubelet[739]: E0916 11:16:12.558964     739 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485372558513219,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:22 multinode-654612 kubelet[739]: E0916 11:16:22.559955     739 eviction_manager.go:257] "Eviction manager: failed to get HasDedicatedImageFs" err="missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485382559762275,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
	Sep 16 11:16:22 multinode-654612 kubelet[739]: E0916 11:16:22.559999     739 eviction_manager.go:212] "Eviction manager: failed to synchronize" err="eviction manager: failed to get HasDedicatedImageFs: missing image stats: &ImageFsInfoResponse{ImageFilesystems:[]*FilesystemUsage{&FilesystemUsage{Timestamp:1726485382559762275,FsId:&FilesystemIdentifier{Mountpoint:/var/lib/containers/storage/overlay-images,},UsedBytes:&UInt64Value{Value:135010,},InodesUsed:&UInt64Value{Value:63,},},},ContainerFilesystems:[]*FilesystemUsage{},}"
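	# Every eviction_manager error above is the same complaint: CRI-O's ImageFsInfoResponse
	# carries an ImageFilesystems entry but an empty ContainerFilesystems list, so kubelet
	# cannot tell whether images live on a dedicated filesystem. A hedged way to inspect the
	# raw CRI answer on the node, assuming crictl ships in the minikube image:
	#   out/minikube-linux-arm64 ssh -p multinode-654612 -- sudo crictl imagefsinfo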
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-654612 -n multinode-654612
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (497.259µs)
helpers_test.go:263: kubectl --context multinode-654612 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/RestartMultiNode (64.13s)
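Every kubectl invocation in this run dies with "fork/exec /usr/local/bin/kubectl: exec format error", which on this arm64 host typically means the kubectl binary on PATH was built for a different architecture. A minimal triage sketch (not part of the recorded run):

	file /usr/local/bin/kubectl    # expect "ELF 64-bit LSB executable, ARM aarch64" on this host
	uname -m                       # aarch64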

                                                
                                    
x
+
TestPreload (23.63s)

                                                
                                                
=== RUN   TestPreload
preload_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-899756 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=crio --kubernetes-version=v1.24.4
preload_test.go:44: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p test-preload-899756 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=crio --kubernetes-version=v1.24.4: exit status 100 (21.379157102s)

                                                
                                                
-- stdout --
	* [test-preload-899756] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on user configuration
	* Using Docker driver with root privileges
	* Starting "test-preload-899756" primary control-plane node in "test-preload-899756" cluster
	* Pulling base image v0.0.45-1726358845-19644 ...
	* Creating docker container (CPUs=2, Memory=2200MB) ...
	* Preparing Kubernetes v1.24.4 on CRI-O 1.24.6 ...
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 11:17:14.202055 1514821 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:17:14.202274 1514821 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:17:14.202300 1514821 out.go:358] Setting ErrFile to fd 2...
	I0916 11:17:14.202322 1514821 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:17:14.202606 1514821 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:17:14.203066 1514821 out.go:352] Setting JSON to false
	I0916 11:17:14.204002 1514821 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":39580,"bootTime":1726445855,"procs":174,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:17:14.204135 1514821 start.go:139] virtualization:  
	I0916 11:17:14.207626 1514821 out.go:177] * [test-preload-899756] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:17:14.211244 1514821 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:17:14.211372 1514821 notify.go:220] Checking for updates...
	I0916 11:17:14.217001 1514821 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:17:14.220458 1514821 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:17:14.223433 1514821 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:17:14.226170 1514821 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:17:14.228826 1514821 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:17:14.231649 1514821 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:17:14.261282 1514821 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:17:14.261423 1514821 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:17:14.322796 1514821 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:17:14.313354521 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:17:14.322913 1514821 docker.go:318] overlay module found
	I0916 11:17:14.325920 1514821 out.go:177] * Using the docker driver based on user configuration
	I0916 11:17:14.328669 1514821 start.go:297] selected driver: docker
	I0916 11:17:14.328705 1514821 start.go:901] validating driver "docker" against <nil>
	I0916 11:17:14.328738 1514821 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:17:14.329391 1514821 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:17:14.393414 1514821 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:25 OomKillDisable:true NGoroutines:41 SystemTime:2024-09-16 11:17:14.383061773 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:17:14.393625 1514821 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:17:14.393858 1514821 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:17:14.397042 1514821 out.go:177] * Using Docker driver with root privileges
	I0916 11:17:14.399738 1514821 cni.go:84] Creating CNI manager for ""
	I0916 11:17:14.399805 1514821 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:17:14.399818 1514821 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:17:14.399917 1514821 start.go:340] cluster config:
	{Name:test-preload-899756 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-899756 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:17:14.402836 1514821 out.go:177] * Starting "test-preload-899756" primary control-plane node in "test-preload-899756" cluster
	I0916 11:17:14.405563 1514821 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:17:14.408266 1514821 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:17:14.411049 1514821 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime crio
	I0916 11:17:14.411135 1514821 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:17:14.411462 1514821 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/test-preload-899756/config.json ...
	I0916 11:17:14.411510 1514821 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/test-preload-899756/config.json: {Name:mkf98abbababb7255057b8a1b597ddfbe64dbf65 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:17:14.411766 1514821 cache.go:107] acquiring lock: {Name:mk48a37467b14862cc733ed5d988ef0733c9af99 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.411937 1514821 image.go:135] retrieving image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:14.412344 1514821 cache.go:107] acquiring lock: {Name:mk4c23b1dd092a9b15b54b86d461dd190436d033 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.412532 1514821 image.go:135] retrieving image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:14.412841 1514821 cache.go:107] acquiring lock: {Name:mk6ff83868cf732b44fa2260ffe6c4aceca55d83 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.412984 1514821 image.go:135] retrieving image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:14.413237 1514821 cache.go:107] acquiring lock: {Name:mk066dfa3ca7fec1fac34c6f4b17616a42bdf7e7 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.413420 1514821 image.go:135] retrieving image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:14.413715 1514821 cache.go:107] acquiring lock: {Name:mka25bb4b437de54d99fe6dab266b52c245a5615 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.413898 1514821 image.go:135] retrieving image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:14.414179 1514821 cache.go:107] acquiring lock: {Name:mkfb3a036b609c5ef46bb7731592bdffbbb10b84 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.414307 1514821 image.go:135] retrieving image: registry.k8s.io/pause:3.7
	I0916 11:17:14.414619 1514821 cache.go:107] acquiring lock: {Name:mk3ef3a08092c7639024cbd4b346ee7c3100329a Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.414736 1514821 image.go:135] retrieving image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:14.415014 1514821 cache.go:107] acquiring lock: {Name:mka595ddab266cba139c2dff8084e14423ce4dde Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.415210 1514821 image.go:135] retrieving image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:14.418210 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-controller-manager:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:14.418673 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-scheduler:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:14.418851 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-proxy:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:14.419393 1514821 image.go:178] daemon lookup for gcr.io/k8s-minikube/storage-provisioner:v5: Error response from daemon: No such image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:14.419675 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-apiserver:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:14.419901 1514821 image.go:178] daemon lookup for registry.k8s.io/coredns/coredns:v1.8.6: Error response from daemon: No such image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:14.420173 1514821 image.go:178] daemon lookup for registry.k8s.io/etcd:3.5.3-0: Error response from daemon: No such image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:14.420728 1514821 image.go:178] daemon lookup for registry.k8s.io/pause:3.7: Error response from daemon: No such image: registry.k8s.io/pause:3.7
	W0916 11:17:14.435939 1514821 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:17:14.435960 1514821 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:17:14.436070 1514821 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:17:14.436108 1514821 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:17:14.436121 1514821 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:17:14.436129 1514821 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:17:14.436137 1514821 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:17:14.570229 1514821 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:17:14.570269 1514821 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:17:14.570296 1514821 start.go:360] acquireMachinesLock for test-preload-899756: {Name:mkcd9df795c8290ec6991b23b20a2c1107ff238b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:17:14.570407 1514821 start.go:364] duration metric: took 87.629µs to acquireMachinesLock for "test-preload-899756"
	I0916 11:17:14.570440 1514821 start.go:93] Provisioning new machine with config: &{Name:test-preload-899756 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-899756 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:17:14.570520 1514821 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:17:14.574052 1514821 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:17:14.574353 1514821 start.go:159] libmachine.API.Create for "test-preload-899756" (driver="docker")
	I0916 11:17:14.574405 1514821 client.go:168] LocalClient.Create starting
	I0916 11:17:14.574497 1514821 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:17:14.574550 1514821 main.go:141] libmachine: Decoding PEM data...
	I0916 11:17:14.574577 1514821 main.go:141] libmachine: Parsing certificate...
	I0916 11:17:14.574659 1514821 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:17:14.574707 1514821 main.go:141] libmachine: Decoding PEM data...
	I0916 11:17:14.574731 1514821 main.go:141] libmachine: Parsing certificate...
	I0916 11:17:14.575159 1514821 cli_runner.go:164] Run: docker network inspect test-preload-899756 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:17:14.594026 1514821 cli_runner.go:211] docker network inspect test-preload-899756 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:17:14.594119 1514821 network_create.go:284] running [docker network inspect test-preload-899756] to gather additional debugging logs...
	I0916 11:17:14.594144 1514821 cli_runner.go:164] Run: docker network inspect test-preload-899756
	W0916 11:17:14.610789 1514821 cli_runner.go:211] docker network inspect test-preload-899756 returned with exit code 1
	I0916 11:17:14.610822 1514821 network_create.go:287] error running [docker network inspect test-preload-899756]: docker network inspect test-preload-899756: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network test-preload-899756 not found
	I0916 11:17:14.610839 1514821 network_create.go:289] output of [docker network inspect test-preload-899756]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network test-preload-899756 not found
	
	** /stderr **
	I0916 11:17:14.610946 1514821 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:17:14.625839 1514821 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-a49e1846148d IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:d3:9d:ef:74} reservation:<nil>}
	I0916 11:17:14.626276 1514821 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-2e9863632116 IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:77:c8:06:b6} reservation:<nil>}
	I0916 11:17:14.626688 1514821 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-76703dbf7b5c IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:29:f7:34:a1} reservation:<nil>}
	I0916 11:17:14.627113 1514821 network.go:206] using free private subnet 192.168.76.0/24: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400000fb00}
	I0916 11:17:14.627137 1514821 network_create.go:124] attempt to create docker network test-preload-899756 192.168.76.0/24 with gateway 192.168.76.1 and MTU of 1500 ...
	I0916 11:17:14.627205 1514821 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.76.0/24 --gateway=192.168.76.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=test-preload-899756 test-preload-899756
	I0916 11:17:14.692530 1514821 network_create.go:108] docker network test-preload-899756 192.168.76.0/24 created
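The network.go lines above walk candidate private /24 subnets and take the first one with no matching local interface: the log shows the third octet advancing 49 → 58 → 67 → 76 (steps of 9) before 192.168.76.0/24 is picked. A minimal sketch of that scan (illustrative only; minikube's real scanner also honors reservations and other subnet ranges):

```go
// Sketch: find the first 192.168.x.0/24 subnet not already claimed
// by a local interface, stepping the third octet by 9 as in the log.
package main

import (
	"fmt"
	"net"
)

func subnetTaken(subnet *net.IPNet) bool {
	addrs, err := net.InterfaceAddrs()
	if err != nil {
		return true // be conservative on error
	}
	for _, a := range addrs {
		if ipnet, ok := a.(*net.IPNet); ok && subnet.Contains(ipnet.IP) {
			return true
		}
	}
	return false
}

func firstFreeSubnet() (string, error) {
	for octet := 49; octet <= 247; octet += 9 {
		cidr := fmt.Sprintf("192.168.%d.0/24", octet)
		_, subnet, err := net.ParseCIDR(cidr)
		if err != nil {
			return "", err
		}
		if !subnetTaken(subnet) {
			return cidr, nil
		}
	}
	return "", fmt.Errorf("no free 192.168.x.0/24 subnet found")
}

func main() {
	cidr, err := firstFreeSubnet()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("using free private subnet", cidr) // e.g. 192.168.76.0/24
}
```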
	I0916 11:17:14.692560 1514821 kic.go:121] calculated static IP "192.168.76.2" for the "test-preload-899756" container
	I0916 11:17:14.692642 1514821 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:17:14.707408 1514821 cli_runner.go:164] Run: docker volume create test-preload-899756 --label name.minikube.sigs.k8s.io=test-preload-899756 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:17:14.729625 1514821 oci.go:103] Successfully created a docker volume test-preload-899756
	I0916 11:17:14.729713 1514821 cli_runner.go:164] Run: docker run --rm --name test-preload-899756-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=test-preload-899756 --entrypoint /usr/bin/test -v test-preload-899756:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:17:14.865033 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7
	I0916 11:17:14.955121 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 exists
	I0916 11:17:14.955148 1514821 cache.go:96] cache image "registry.k8s.io/pause:3.7" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7" took 540.97796ms
	I0916 11:17:14.955159 1514821 cache.go:80] save to tar file registry.k8s.io/pause:3.7 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 succeeded
	I0916 11:17:14.974813 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0
	W0916 11:17:14.986756 1514821 image.go:283] image registry.k8s.io/coredns/coredns:v1.8.6 arch mismatch: want arm64 got amd64. fixing
	I0916 11:17:14.986822 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6
	I0916 11:17:14.989063 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4
	I0916 11:17:14.990024 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4
	I0916 11:17:14.995551 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4
	I0916 11:17:15.001057 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4
	I0916 11:17:15.293777 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 exists
	I0916 11:17:15.293807 1514821 cache.go:96] cache image "registry.k8s.io/coredns/coredns:v1.8.6" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6" took 878.796766ms
	I0916 11:17:15.293827 1514821 cache.go:80] save to tar file registry.k8s.io/coredns/coredns:v1.8.6 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 succeeded
	W0916 11:17:15.412356 1514821 image.go:283] image gcr.io/k8s-minikube/storage-provisioner:v5 arch mismatch: want arm64 got amd64. fixing
	I0916 11:17:15.412411 1514821 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5
	I0916 11:17:15.452915 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 exists
	I0916 11:17:15.452990 1514821 cache.go:96] cache image "registry.k8s.io/kube-scheduler:v1.24.4" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4" took 1.039758808s
	I0916 11:17:15.453016 1514821 cache.go:80] save to tar file registry.k8s.io/kube-scheduler:v1.24.4 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 succeeded
	I0916 11:17:15.545734 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 exists
	I0916 11:17:15.545765 1514821 cache.go:96] cache image "registry.k8s.io/kube-apiserver:v1.24.4" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4" took 1.133445923s
	I0916 11:17:15.545777 1514821 cache.go:80] save to tar file registry.k8s.io/kube-apiserver:v1.24.4 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 succeeded
	I0916 11:17:15.593101 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 exists
	I0916 11:17:15.593128 1514821 cache.go:96] cache image "registry.k8s.io/kube-proxy:v1.24.4" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4" took 1.179417195s
	I0916 11:17:15.593140 1514821 cache.go:80] save to tar file registry.k8s.io/kube-proxy:v1.24.4 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 succeeded
	I0916 11:17:15.601970 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 exists
	I0916 11:17:15.601997 1514821 cache.go:96] cache image "registry.k8s.io/kube-controller-manager:v1.24.4" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4" took 1.189162275s
	I0916 11:17:15.602009 1514821 cache.go:80] save to tar file registry.k8s.io/kube-controller-manager:v1.24.4 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 succeeded
	I0916 11:17:15.712199 1514821 oci.go:107] Successfully prepared a docker volume test-preload-899756
	I0916 11:17:15.712250 1514821 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime crio
	W0916 11:17:15.712392 1514821 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:17:15.712516 1514821 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:17:15.801241 1514821 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname test-preload-899756 --name test-preload-899756 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=test-preload-899756 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=test-preload-899756 --network test-preload-899756 --ip 192.168.76.2 --volume test-preload-899756:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:17:15.862364 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 exists
	I0916 11:17:15.862393 1514821 cache.go:96] cache image "gcr.io/k8s-minikube/storage-provisioner:v5" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5" took 1.450629833s
	I0916 11:17:15.862406 1514821 cache.go:80] save to tar file gcr.io/k8s-minikube/storage-provisioner:v5 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 succeeded
	I0916 11:17:16.061302 1514821 cache.go:157] /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 exists
	I0916 11:17:16.061381 1514821 cache.go:96] cache image "registry.k8s.io/etcd:3.5.3-0" -> "/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0" took 1.646764956s
	I0916 11:17:16.061410 1514821 cache.go:80] save to tar file registry.k8s.io/etcd:3.5.3-0 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 succeeded
	I0916 11:17:16.061466 1514821 cache.go:87] Successfully saved all images to host disk.
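Each "cache image ... took ..." / "save to tar file ... succeeded" pair above is one pass of the same loop: if the image's tarball already exists under the cache directory it is reported immediately, otherwise the image is fetched and written out. A sketch of that loop, with the pull-and-save step stubbed out (helper names are placeholders, not minikube's API):

```go
// Sketch: per-image tarball caching — skip when the tar exists,
// otherwise write it, reporting timings like the cache.go lines above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
)

// cachePath maps "registry.k8s.io/pause:3.7" to
// "<dir>/registry.k8s.io/pause_3.7", matching the paths in the log.
func cachePath(dir, image string) string {
	return filepath.Join(dir, strings.ReplaceAll(image, ":", "_"))
}

func ensureCached(dir, image string, writeImageTar func(dst string) error) error {
	start := time.Now()
	dst := cachePath(dir, image)
	if _, err := os.Stat(dst); err == nil {
		fmt.Printf("cache image %q -> %q took %s (exists)\n", image, dst, time.Since(start))
		return nil
	}
	if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil {
		return err
	}
	if err := writeImageTar(dst); err != nil {
		return err
	}
	fmt.Printf("save to tar file %s -> %s succeeded\n", image, dst)
	return nil
}

func main() {
	// Stub standing in for the real pull-and-save step.
	stub := func(dst string) error { return os.WriteFile(dst, []byte("tar"), 0o644) }
	dir := filepath.Join(os.TempDir(), "images", "arm64")
	for _, img := range []string{"registry.k8s.io/pause:3.7", "registry.k8s.io/etcd:3.5.3-0"} {
		if err := ensureCached(dir, img, stub); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
	}
}
```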
	I0916 11:17:16.224050 1514821 cli_runner.go:164] Run: docker container inspect test-preload-899756 --format={{.State.Running}}
	I0916 11:17:16.259930 1514821 cli_runner.go:164] Run: docker container inspect test-preload-899756 --format={{.State.Status}}
	I0916 11:17:16.289178 1514821 cli_runner.go:164] Run: docker exec test-preload-899756 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:17:16.376421 1514821 oci.go:144] the created container "test-preload-899756" has a running status.
	I0916 11:17:16.376469 1514821 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa...
	I0916 11:17:16.946428 1514821 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:17:16.990935 1514821 cli_runner.go:164] Run: docker container inspect test-preload-899756 --format={{.State.Status}}
	I0916 11:17:17.031545 1514821 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:17:17.031566 1514821 kic_runner.go:114] Args: [docker exec --privileged test-preload-899756 chown docker:docker /home/docker/.ssh/authorized_keys]
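The kic.go/kic_runner lines above create an ssh keypair for the node, install the public key as /home/docker/.ssh/authorized_keys inside the container, and chown it to the docker user. A sketch of that flow, shelling out to ssh-keygen and docker as an illustrative simplification (minikube generates the key in Go and copies it through its own runner):

```go
// Sketch: provision an ssh key for a kic node container.
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func run(name string, args ...string) error {
	cmd := exec.Command(name, args...)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}

func provisionSSHKey(keyPath, container string) error {
	if _, err := os.Stat(keyPath); os.IsNotExist(err) {
		if err := run("ssh-keygen", "-t", "rsa", "-N", "", "-f", keyPath); err != nil {
			return fmt.Errorf("generate key: %w", err)
		}
	}
	if err := run("docker", "cp", keyPath+".pub",
		container+":/home/docker/.ssh/authorized_keys"); err != nil {
		return fmt.Errorf("copy pubkey: %w", err)
	}
	// Matches: docker exec --privileged <c> chown docker:docker .../authorized_keys
	return run("docker", "exec", "--privileged", container,
		"chown", "docker:docker", "/home/docker/.ssh/authorized_keys")
}

func main() {
	if err := provisionSSHKey("/tmp/test-preload-899756_id_rsa", "test-preload-899756"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```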
	I0916 11:17:17.087482 1514821 cli_runner.go:164] Run: docker container inspect test-preload-899756 --format={{.State.Status}}
	I0916 11:17:17.111372 1514821 machine.go:93] provisionDockerMachine start ...
	I0916 11:17:17.111475 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:17.134455 1514821 main.go:141] libmachine: Using SSH client type: native
	I0916 11:17:17.134758 1514821 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34788 <nil> <nil>}
	I0916 11:17:17.134774 1514821 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:17:17.280338 1514821 main.go:141] libmachine: SSH cmd err, output: <nil>: test-preload-899756
	
	I0916 11:17:17.280410 1514821 ubuntu.go:169] provisioning hostname "test-preload-899756"
	I0916 11:17:17.280521 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:17.304189 1514821 main.go:141] libmachine: Using SSH client type: native
	I0916 11:17:17.304439 1514821 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34788 <nil> <nil>}
	I0916 11:17:17.304452 1514821 main.go:141] libmachine: About to run SSH command:
	sudo hostname test-preload-899756 && echo "test-preload-899756" | sudo tee /etc/hostname
	I0916 11:17:17.461331 1514821 main.go:141] libmachine: SSH cmd err, output: <nil>: test-preload-899756
	
	I0916 11:17:17.461425 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:17.478801 1514821 main.go:141] libmachine: Using SSH client type: native
	I0916 11:17:17.479054 1514821 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34788 <nil> <nil>}
	I0916 11:17:17.479073 1514821 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\stest-preload-899756' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 test-preload-899756/g' /etc/hosts;
				else 
					echo '127.0.1.1 test-preload-899756' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:17:17.616852 1514821 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:17:17.616878 1514821 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:17:17.616906 1514821 ubuntu.go:177] setting up certificates
	I0916 11:17:17.616917 1514821 provision.go:84] configureAuth start
	I0916 11:17:17.616985 1514821 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-899756
	I0916 11:17:17.634753 1514821 provision.go:143] copyHostCerts
	I0916 11:17:17.634818 1514821 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:17:17.634827 1514821 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:17:17.634906 1514821 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:17:17.635010 1514821 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:17:17.635016 1514821 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:17:17.635045 1514821 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:17:17.635102 1514821 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:17:17.635108 1514821 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:17:17.635131 1514821 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:17:17.635179 1514821 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.test-preload-899756 san=[127.0.0.1 192.168.76.2 localhost minikube test-preload-899756]
	I0916 11:17:17.985062 1514821 provision.go:177] copyRemoteCerts
	I0916 11:17:17.985140 1514821 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:17:17.985187 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.003023 1514821 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34788 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa Username:docker}
	I0916 11:17:18.102547 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:17:18.129890 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1224 bytes)
	I0916 11:17:18.155820 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:17:18.181476 1514821 provision.go:87] duration metric: took 564.544024ms to configureAuth
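The copyHostCerts portion of configureAuth above follows one pattern per certificate: if a copy already exists at the destination it is removed, then the source cert is rewritten fresh. A minimal sketch of that pattern, with illustrative stand-ins for the .minikube paths in the log:

```go
// Sketch: refresh host certs the way the exec_runner lines above do —
// "found ..., removing ..." then "cp: <src> --> <dst> (<n> bytes)".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func copyHostCert(src, dstDir string) error {
	dst := filepath.Join(dstDir, filepath.Base(src))
	if _, err := os.Stat(dst); err == nil {
		if err := os.Remove(dst); err != nil { // remove the stale copy first
			return err
		}
	}
	data, err := os.ReadFile(src)
	if err != nil {
		return err
	}
	if err := os.WriteFile(dst, data, 0o600); err != nil {
		return err
	}
	fmt.Printf("cp: %s --> %s (%d bytes)\n", src, dst, len(data))
	return nil
}

func main() {
	certsDir := os.ExpandEnv("$HOME/.minikube/certs")
	dstDir := os.ExpandEnv("$HOME/.minikube")
	for _, name := range []string{"ca.pem", "cert.pem", "key.pem"} {
		if err := copyHostCert(filepath.Join(certsDir, name), dstDir); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
	}
}
```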
	I0916 11:17:18.181502 1514821 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:17:18.181707 1514821 config.go:182] Loaded profile config "test-preload-899756": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.24.4
	I0916 11:17:18.181814 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.198519 1514821 main.go:141] libmachine: Using SSH client type: native
	I0916 11:17:18.198772 1514821 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34788 <nil> <nil>}
	I0916 11:17:18.198788 1514821 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:17:18.441818 1514821 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:17:18.441907 1514821 machine.go:96] duration metric: took 1.330514797s to provisionDockerMachine
	I0916 11:17:18.441933 1514821 client.go:171] duration metric: took 3.867508761s to LocalClient.Create
	I0916 11:17:18.441982 1514821 start.go:167] duration metric: took 3.867635429s to libmachine.API.Create "test-preload-899756"
	I0916 11:17:18.442009 1514821 start.go:293] postStartSetup for "test-preload-899756" (driver="docker")
	I0916 11:17:18.442036 1514821 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:17:18.442140 1514821 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:17:18.442209 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.458908 1514821 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34788 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa Username:docker}
	I0916 11:17:18.557862 1514821 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:17:18.560867 1514821 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:17:18.560904 1514821 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:17:18.560915 1514821 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:17:18.560923 1514821 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:17:18.560934 1514821 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:17:18.560998 1514821 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:17:18.561085 1514821 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:17:18.561192 1514821 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:17:18.570284 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:17:18.595781 1514821 start.go:296] duration metric: took 153.742593ms for postStartSetup
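The filesync scan above treats everything under .minikube/files as a mirror of the node filesystem, which is why files/etc/ssl/certs/13838332.pem becomes a local asset destined for /etc/ssl/certs/13838332.pem. A sketch of that mapping with a hypothetical walker (not minikube's implementation):

```go
// Sketch: map host paths under .minikube/files to their node
// destinations, as the filesync.go lines above report.
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// scanLocalAssets returns hostPath -> nodePath for every file under root.
func scanLocalAssets(root string) (map[string]string, error) {
	assets := map[string]string{}
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return err
		}
		rel, err := filepath.Rel(root, path)
		if err != nil {
			return err
		}
		assets[path] = "/" + filepath.ToSlash(rel) // files/etc/... -> /etc/...
		return nil
	})
	return assets, err
}

func main() {
	root := os.ExpandEnv("$HOME/.minikube/files")
	assets, err := scanLocalAssets(root)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	for src, dst := range assets {
		fmt.Printf("local asset: %s -> %s\n", src, dst)
	}
}
```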
	I0916 11:17:18.596163 1514821 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-899756
	I0916 11:17:18.612563 1514821 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/test-preload-899756/config.json ...
	I0916 11:17:18.612910 1514821 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:17:18.612969 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.629537 1514821 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34788 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa Username:docker}
	I0916 11:17:18.725549 1514821 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:17:18.730227 1514821 start.go:128] duration metric: took 4.159671519s to createHost
	I0916 11:17:18.730253 1514821 start.go:83] releasing machines lock for "test-preload-899756", held for 4.159831031s
	I0916 11:17:18.730331 1514821 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-899756
	I0916 11:17:18.746562 1514821 ssh_runner.go:195] Run: cat /version.json
	I0916 11:17:18.746628 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.746879 1514821 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:17:18.746954 1514821 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-899756
	I0916 11:17:18.772657 1514821 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34788 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa Username:docker}
	I0916 11:17:18.772737 1514821 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34788 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/test-preload-899756/id_rsa Username:docker}
	I0916 11:17:18.987772 1514821 ssh_runner.go:195] Run: systemctl --version
	I0916 11:17:18.992061 1514821 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:17:19.133678 1514821 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:17:19.138496 1514821 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:17:19.161644 1514821 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:17:19.161738 1514821 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:17:19.198937 1514821 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:17:19.199011 1514821 start.go:495] detecting cgroup driver to use...
	I0916 11:17:19.199059 1514821 detect.go:187] detected "cgroupfs" cgroup driver on host os
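The detect.go line above picks the cgroup driver the node's runtime should use. One common heuristic, shown here as a sketch only (minikube's actual detection differs in detail, and this host reported "cgroupfs"):

```go
// Sketch: a simple cgroup-driver heuristic — prefer "systemd" on
// unified (cgroup v2) hosts, otherwise fall back to "cgroupfs".
package main

import (
	"fmt"
	"os"
)

func detectCgroupDriver() string {
	// cgroup.controllers only exists at the root of a v2 unified hierarchy.
	if _, err := os.Stat("/sys/fs/cgroup/cgroup.controllers"); err == nil {
		return "systemd"
	}
	return "cgroupfs"
}

func main() {
	fmt.Printf("detected %q cgroup driver on host os\n", detectCgroupDriver())
}
```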
	I0916 11:17:19.199142 1514821 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:17:19.217235 1514821 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:17:19.229908 1514821 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:17:19.230026 1514821 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:17:19.245223 1514821 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:17:19.261223 1514821 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:17:19.351447 1514821 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:17:19.452019 1514821 docker.go:233] disabling docker service ...
	I0916 11:17:19.452097 1514821 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:17:19.473071 1514821 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:17:19.486050 1514821 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:17:19.585392 1514821 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:17:19.685936 1514821 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:17:19.697979 1514821 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:17:19.715463 1514821 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.7" pause image...
	I0916 11:17:19.715593 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.7"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.725806 1514821 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
	I0916 11:17:19.725931 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.736909 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.746612 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.756631 1514821 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:17:19.765787 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.775856 1514821 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.791871 1514821 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:17:19.802478 1514821 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:17:19.811794 1514821 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:17:19.820184 1514821 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:17:19.909484 1514821 ssh_runner.go:195] Run: sudo systemctl restart crio
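The sed commands above amount to two edits to /etc/crio/crio.conf.d/02-crio.conf (pause image and cgroup manager), followed by a cri-o restart. A sketch of the same net effect using regexp rewrites instead of sed over ssh (illustrative only):

```go
// Sketch: apply the pause_image and cgroup_manager edits that the
// sed commands in the log perform, then prompt for a crio restart.
package main

import (
	"fmt"
	"os"
	"regexp"
)

func configureCrio(conf string) error {
	data, err := os.ReadFile(conf)
	if err != nil {
		return err
	}
	out := regexp.MustCompile(`(?m)^.*pause_image = .*$`).
		ReplaceAll(data, []byte(`pause_image = "registry.k8s.io/pause:3.7"`))
	out = regexp.MustCompile(`(?m)^.*cgroup_manager = .*$`).
		ReplaceAll(out, []byte(`cgroup_manager = "cgroupfs"`))
	return os.WriteFile(conf, out, 0o644)
}

func main() {
	if err := configureCrio("/etc/crio/crio.conf.d/02-crio.conf"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("updated 02-crio.conf; run: sudo systemctl restart crio")
}
```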
	I0916 11:17:20.046227 1514821 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:17:20.046342 1514821 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:17:20.050485 1514821 start.go:563] Will wait 60s for crictl version
	I0916 11:17:20.050555 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.054396 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:17:20.095238 1514821 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:17:20.095346 1514821 ssh_runner.go:195] Run: crio --version
	I0916 11:17:20.139539 1514821 ssh_runner.go:195] Run: crio --version
	I0916 11:17:20.182812 1514821 out.go:177] * Preparing Kubernetes v1.24.4 on CRI-O 1.24.6 ...
	I0916 11:17:20.185336 1514821 cli_runner.go:164] Run: docker network inspect test-preload-899756 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:17:20.201881 1514821 ssh_runner.go:195] Run: grep 192.168.76.1	host.minikube.internal$ /etc/hosts
	I0916 11:17:20.206023 1514821 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.76.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:17:20.217078 1514821 kubeadm.go:883] updating cluster {Name:test-preload-899756 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-899756 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:17:20.217196 1514821 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime crio
	I0916 11:17:20.217238 1514821 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:17:20.261085 1514821 crio.go:510] couldn't find preloaded image for "registry.k8s.io/kube-apiserver:v1.24.4". assuming images are not preloaded.
	I0916 11:17:20.261112 1514821 cache_images.go:88] LoadCachedImages start: [registry.k8s.io/kube-apiserver:v1.24.4 registry.k8s.io/kube-controller-manager:v1.24.4 registry.k8s.io/kube-scheduler:v1.24.4 registry.k8s.io/kube-proxy:v1.24.4 registry.k8s.io/pause:3.7 registry.k8s.io/etcd:3.5.3-0 registry.k8s.io/coredns/coredns:v1.8.6 gcr.io/k8s-minikube/storage-provisioner:v5]
	I0916 11:17:20.261156 1514821 image.go:135] retrieving image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:20.261205 1514821 image.go:135] retrieving image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.261359 1514821 image.go:135] retrieving image: registry.k8s.io/pause:3.7
	I0916 11:17:20.261382 1514821 image.go:135] retrieving image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.261455 1514821 image.go:135] retrieving image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:20.261515 1514821 image.go:135] retrieving image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:20.261547 1514821 image.go:135] retrieving image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:20.261627 1514821 image.go:135] retrieving image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:20.262949 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-scheduler:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:20.263220 1514821 image.go:178] daemon lookup for registry.k8s.io/pause:3.7: Error response from daemon: No such image: registry.k8s.io/pause:3.7
	I0916 11:17:20.263366 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-apiserver:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.263512 1514821 image.go:178] daemon lookup for registry.k8s.io/etcd:3.5.3-0: Error response from daemon: No such image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:20.263671 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-controller-manager:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:20.263828 1514821 image.go:178] daemon lookup for registry.k8s.io/coredns/coredns:v1.8.6: Error response from daemon: No such image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:20.263977 1514821 image.go:178] daemon lookup for gcr.io/k8s-minikube/storage-provisioner:v5: Error response from daemon: No such image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:20.264165 1514821 image.go:178] daemon lookup for registry.k8s.io/kube-proxy:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.541044 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.581987 1514821 cache_images.go:116] "registry.k8s.io/kube-proxy:v1.24.4" needs transfer: "registry.k8s.io/kube-proxy:v1.24.4" does not exist at hash "bd8cc6d58247078a865774b7f516f8afc3ac8cd080fd49650ca30ef2fbc6ebd1" in container runtime
	I0916 11:17:20.582028 1514821 cri.go:218] Removing image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.582082 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.585877 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.617126 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.623886 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.668448 1514821 cache_images.go:116] "registry.k8s.io/kube-apiserver:v1.24.4" needs transfer: "registry.k8s.io/kube-apiserver:v1.24.4" does not exist at hash "3767741e7fba72f328a8500a18ef34481343eb78697e31ae5bf3e390a28317ae" in container runtime
	I0916 11:17:20.668503 1514821 cri.go:218] Removing image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.668561 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.671078 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:17:20.674255 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.720603 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:20.725537 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4
	I0916 11:17:20.725658 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:17:20.725765 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.725992 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/pause:3.7
	I0916 11:17:20.728922 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/kube-scheduler:v1.24.4
	W0916 11:17:20.729880 1514821 image.go:283] image registry.k8s.io/coredns/coredns:v1.8.6 arch mismatch: want arm64 got amd64. fixing
	I0916 11:17:20.730062 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:20.731664 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:20.833226 1514821 cache_images.go:116] "registry.k8s.io/etcd:3.5.3-0" needs transfer: "registry.k8s.io/etcd:3.5.3-0" does not exist at hash "a9a710bb96df080e6b9c720eb85dc5b832ff84abf77263548d74fedec6466a5a" in container runtime
	I0916 11:17:20.833268 1514821 cri.go:218] Removing image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:20.833332 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.874743 1514821 cache_images.go:116] "registry.k8s.io/pause:3.7" needs transfer: "registry.k8s.io/pause:3.7" does not exist at hash "e5a475a0380575fb5df454b2e32bdec93e1ec0094d8a61e895b41567cb884550" in container runtime
	I0916 11:17:20.874788 1514821 cri.go:218] Removing image: registry.k8s.io/pause:3.7
	I0916 11:17:20.874851 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-proxy_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-proxy_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-proxy_v1.24.4': No such file or directory
	I0916 11:17:20.874873 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 --> /var/lib/minikube/images/kube-proxy_v1.24.4 (38148096 bytes)
	I0916 11:17:20.875014 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:17:20.875076 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.886522 1514821 cache_images.go:116] "registry.k8s.io/kube-scheduler:v1.24.4" needs transfer: "registry.k8s.io/kube-scheduler:v1.24.4" does not exist at hash "5753e4610b3ec0ac100c3535b8d8a7507b3d031148e168c2c3c4b0f389976074" in container runtime
	I0916 11:17:20.886608 1514821 cri.go:218] Removing image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:20.886689 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.915663 1514821 cache_images.go:116] "registry.k8s.io/coredns/coredns:v1.8.6" needs transfer: "registry.k8s.io/coredns/coredns:v1.8.6" does not exist at hash "6af7f860a8197bfa3fdb7dec2061aa33870253e87a1e91c492d55b8a4fd38d14" in container runtime
	I0916 11:17:20.915708 1514821 cri.go:218] Removing image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:20.915768 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.939865 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:20.939956 1514821 cache_images.go:116] "registry.k8s.io/kube-controller-manager:v1.24.4" needs transfer: "registry.k8s.io/kube-controller-manager:v1.24.4" does not exist at hash "81a4a8a4ac639bdd7e118359417a80cab1a0d0e4737eb735714cf7f8b15dc0c7" in container runtime
	I0916 11:17:20.939986 1514821 cri.go:218] Removing image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:20.940027 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:20.980438 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:17:20.980476 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4
	I0916 11:17:20.980645 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:17:20.989375 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:20.989457 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:21.084374 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:21.084493 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:17:21.084528 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-apiserver_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-apiserver_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-apiserver_v1.24.4': No such file or directory
	I0916 11:17:21.084805 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 --> /var/lib/minikube/images/kube-apiserver_v1.24.4 (30873088 bytes)
	I0916 11:17:21.084567 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:21.109437 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:21.109612 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:21.234928 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:17:21.235048 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:17:21.235204 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	W0916 11:17:21.255616 1514821 image.go:283] image gcr.io/k8s-minikube/storage-provisioner:v5 arch mismatch: want arm64 got amd64. fixing
	I0916 11:17:21.255804 1514821 ssh_runner.go:195] Run: sudo podman image inspect --format {{.Id}} gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:21.341655 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:17:21.341748 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:17:21.418512 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7
	I0916 11:17:21.418686 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/pause_3.7
	I0916 11:17:21.418775 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0
	I0916 11:17:21.418861 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:17:21.419112 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:17:21.494673 1514821 cache_images.go:116] "gcr.io/k8s-minikube/storage-provisioner:v5" needs transfer: "gcr.io/k8s-minikube/storage-provisioner:v5" does not exist at hash "66749159455b3f08c8318fe0233122f54d0f5889f9c5fdfb73c3fd9d99895b51" in container runtime
	I0916 11:17:21.494724 1514821 cri.go:218] Removing image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:21.494799 1514821 ssh_runner.go:195] Run: which crictl
	I0916 11:17:21.517692 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4
	I0916 11:17:21.517814 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:17:21.517884 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6
	I0916 11:17:21.517937 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:17:21.549748 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/pause_3.7: stat -c "%s %y" /var/lib/minikube/images/pause_3.7: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/pause_3.7': No such file or directory
	I0916 11:17:21.550094 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 --> /var/lib/minikube/images/pause_3.7 (268288 bytes)
	I0916 11:17:21.549911 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/etcd_3.5.3-0: stat -c "%s %y" /var/lib/minikube/images/etcd_3.5.3-0: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/etcd_3.5.3-0': No such file or directory
	I0916 11:17:21.550193 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 --> /var/lib/minikube/images/etcd_3.5.3-0 (81117184 bytes)
	I0916 11:17:21.549989 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4
	I0916 11:17:21.550357 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:17:21.549994 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:21.550047 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/coredns_v1.8.6: stat -c "%s %y" /var/lib/minikube/images/coredns_v1.8.6: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/coredns_v1.8.6': No such file or directory
	I0916 11:17:21.550481 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 --> /var/lib/minikube/images/coredns_v1.8.6 (12318720 bytes)
	I0916 11:17:21.550065 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-scheduler_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-scheduler_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-scheduler_v1.24.4': No such file or directory
	I0916 11:17:21.550564 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 --> /var/lib/minikube/images/kube-scheduler_v1.24.4 (14094336 bytes)
	I0916 11:17:21.666909 1514821 crio.go:275] Loading image: /var/lib/minikube/images/pause_3.7
	I0916 11:17:21.666994 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/pause_3.7
	I0916 11:17:21.693603 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-controller-manager_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-controller-manager_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-controller-manager_v1.24.4': No such file or directory
	I0916 11:17:21.693653 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 --> /var/lib/minikube/images/kube-controller-manager_v1.24.4 (28246528 bytes)
	I0916 11:17:21.693919 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:22.106977 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 from cache
	I0916 11:17:22.107107 1514821 crio.go:275] Loading image: /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:17:22.107245 1514821 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:17:22.107446 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:17:22.203213 1514821 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5
	I0916 11:17:22.203345 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:17:23.253370 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/coredns_v1.8.6: (1.145871901s)
	I0916 11:17:23.253398 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 from cache
	I0916 11:17:23.253414 1514821 ssh_runner.go:235] Completed: stat -c "%s %y" /var/lib/minikube/images/storage-provisioner_v5: (1.050044867s)
	I0916 11:17:23.253451 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/images/storage-provisioner_v5: stat -c "%s %y" /var/lib/minikube/images/storage-provisioner_v5: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/storage-provisioner_v5': No such file or directory
	I0916 11:17:23.253483 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 --> /var/lib/minikube/images/storage-provisioner_v5 (8035840 bytes)
	I0916 11:17:23.253419 1514821 crio.go:275] Loading image: /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:17:23.253584 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:17:24.469426 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/kube-scheduler_v1.24.4: (1.2158125s)
	I0916 11:17:24.469450 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 from cache
	I0916 11:17:24.469468 1514821 crio.go:275] Loading image: /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:17:24.469539 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:17:26.219359 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/kube-apiserver_v1.24.4: (1.749790055s)
	I0916 11:17:26.219391 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 from cache
	I0916 11:17:26.219410 1514821 crio.go:275] Loading image: /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:17:26.219462 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:17:28.503922 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/kube-proxy_v1.24.4: (2.284435381s)
	I0916 11:17:28.503948 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 from cache
	I0916 11:17:28.503968 1514821 crio.go:275] Loading image: /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:17:28.504018 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:17:30.255304 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/kube-controller-manager_v1.24.4: (1.751257698s)
	I0916 11:17:30.255330 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 from cache
	I0916 11:17:30.255350 1514821 crio.go:275] Loading image: /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:17:30.255402 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:17:33.648127 1514821 ssh_runner.go:235] Completed: sudo podman load -i /var/lib/minikube/images/etcd_3.5.3-0: (3.392700347s)
	I0916 11:17:33.648153 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 from cache
	I0916 11:17:33.648179 1514821 crio.go:275] Loading image: /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:17:33.648262 1514821 ssh_runner.go:195] Run: sudo podman load -i /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:17:34.296987 1514821 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 from cache
	I0916 11:17:34.297029 1514821 cache_images.go:123] Successfully loaded all cached images
	I0916 11:17:34.297036 1514821 cache_images.go:92] duration metric: took 14.035910507s to LoadCachedImages
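The image-load phase above repeats a fixed pattern per image: stat the tarball on the node, scp it over from the local cache if the existence check fails, then load it into CRI-O's image store via podman (podman and CRI-O can share the same containers/storage backend, which is why the log loads images with podman on a crio node). A minimal shell sketch of that pattern, using paths taken from the log (the cp stands in for the scp-over-ssh the real code performs):

	IMG=/var/lib/minikube/images/coredns_v1.8.6
	CACHE=$HOME/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6
	# existence check, exactly as the log runs it; copy from cache on a miss
	stat -c "%s %y" "$IMG" >/dev/null 2>&1 || sudo cp "$CACHE" "$IMG"
	sudo podman load -i "$IMG"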
	I0916 11:17:34.297047 1514821 kubeadm.go:934] updating node { 192.168.76.2 8443 v1.24.4 crio true true} ...
	I0916 11:17:34.297142 1514821 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.24.4/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --container-runtime-endpoint=unix:///var/run/crio/crio.sock --enforce-node-allocatable= --hostname-override=test-preload-899756 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.76.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.24.4 ClusterName:test-preload-899756 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
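To compare this generated drop-in against what actually landed on the node, the same inspection command the suite itself runs elsewhere (see the Audit table further down) can be issued through the profile's SSH session, e.g.:

	minikube ssh -p test-preload-899756 -- sudo systemctl cat kubelet --no-pager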
	I0916 11:17:34.297242 1514821 ssh_runner.go:195] Run: crio config
	I0916 11:17:34.357570 1514821 cni.go:84] Creating CNI manager for ""
	I0916 11:17:34.357594 1514821 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:17:34.357604 1514821 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:17:34.357628 1514821 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.76.2 APIServerPort:8443 KubernetesVersion:v1.24.4 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:test-preload-899756 NodeName:test-preload-899756 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.76.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.76.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:17:34.357768 1514821 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.76.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "test-preload-899756"
	  kubeletExtraArgs:
	    node-ip: 192.168.76.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.76.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.24.4
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:17:34.357843 1514821 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.24.4
	I0916 11:17:34.367823 1514821 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.24.4: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.24.4': No such file or directory
	
	Initiating transfer...
	I0916 11:17:34.367897 1514821 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.24.4
	I0916 11:17:34.378128 1514821 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubectl?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubectl.sha256 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.24.4/kubectl
	I0916 11:17:34.378542 1514821 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.24.4/kubelet
	I0916 11:17:34.378717 1514821 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubeadm.sha256 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.24.4/kubeadm
	I0916 11:17:35.226911 1514821 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.24.4/kubectl
	I0916 11:17:35.231534 1514821 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.24.4/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.24.4/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.24.4/kubectl': No such file or directory
	I0916 11:17:35.231578 1514821 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.24.4/kubectl --> /var/lib/minikube/binaries/v1.24.4/kubectl (44564480 bytes)
	I0916 11:17:35.510564 1514821 out.go:201] 
	W0916 11:17:35.513271 1514821 out.go:270] X Exiting due to K8S_INSTALL_FAILED: Failed to update cluster: update primary control-plane node: downloading binaries: downloading kubelet: download failed: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256: getter: &{Ctx:context.Background Src:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256 Dst:/home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.24.4/kubelet.download Pwd: Mode:2 Umask:---------- Detectors:[0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320] Decompressors:map[bz2:0x40004e55d0 gz:0x40004e55d8 tar:0x40004e5580 tar.bz2:0x40004e5590 tar.gz:0x40004e55a0 tar.xz:0x40004e55b0 tar.zst:0x40004e55c0 tbz2:0x40004e5590 tgz:0x40004e55a0 txz:0x40004e55b0 tzst:0x40004e55c0 xz:0x40004e55e0 zip:0x40004e55f0 zst:0x40004e55e8] Getters:map[file:0x4001a7cee0 http:0x40018b7d10 https:0x40018b7d60] Dir:false ProgressListener:<nil> Insecure:false DisableSymlinks:false Options:[]}: stream error: stream ID 1; PROTOCOL_ERROR; received from peer
	W0916 11:17:35.513300 1514821 out.go:270] * 
	W0916 11:17:35.514183 1514821 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0916 11:17:35.517945 1514821 out.go:201] 

                                                
                                                
** /stderr **
preload_test.go:46: out/minikube-linux-arm64 start -p test-preload-899756 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=crio --kubernetes-version=v1.24.4 failed: exit status 100
panic.go:629: *** TestPreload FAILED at 2024-09-16 11:17:35.555401612 +0000 UTC m=+2568.289495451
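For manual triage, the failed kubelet download can be retried and checksum-verified outside minikube using the same URLs from the download.go lines above; the PROTOCOL_ERROR points at a transient HTTP/2 stream failure rather than a checksum mismatch. A sketch:

	# URLs copied verbatim from the log
	curl -fLo kubelet        https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet
	curl -fLo kubelet.sha256 https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256
	# the .sha256 file holds only the hex digest, so pair it with the filename ourselves
	echo "$(cat kubelet.sha256)  kubelet" | sha256sum --check -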
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestPreload]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect test-preload-899756
helpers_test.go:235: (dbg) docker inspect test-preload-899756:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c",
	        "Created": "2024-09-16T11:17:15.826478853Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1515232,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:17:16.036125125Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c/hostname",
	        "HostsPath": "/var/lib/docker/containers/b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c/hosts",
	        "LogPath": "/var/lib/docker/containers/b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c/b98aed3a0a970bd5f1cfab2eb91615ab08e12cd4465db08907e3b46bd55e7c5c-json.log",
	        "Name": "/test-preload-899756",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "test-preload-899756:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "test-preload-899756",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/1242eb652ecbfe3fbf44fe07c805af0da3902adee91f1044263a5f9bc718f4da-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/1242eb652ecbfe3fbf44fe07c805af0da3902adee91f1044263a5f9bc718f4da/merged",
	                "UpperDir": "/var/lib/docker/overlay2/1242eb652ecbfe3fbf44fe07c805af0da3902adee91f1044263a5f9bc718f4da/diff",
	                "WorkDir": "/var/lib/docker/overlay2/1242eb652ecbfe3fbf44fe07c805af0da3902adee91f1044263a5f9bc718f4da/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "test-preload-899756",
	                "Source": "/var/lib/docker/volumes/test-preload-899756/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "test-preload-899756",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "test-preload-899756",
	                "name.minikube.sigs.k8s.io": "test-preload-899756",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "f91aac4209e60915fd1554a0aacacc00202e59e1beef3496cf5cbf49ace9ae45",
	            "SandboxKey": "/var/run/docker/netns/f91aac4209e6",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34788"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34789"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34792"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34790"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34791"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "test-preload-899756": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.76.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:4c:02",
	                    "DriverOpts": null,
	                    "NetworkID": "c3aac96128a3f9c06a6e553a20a385f2b40ce552892a1aaee504bbb4aa5bcc36",
	                    "EndpointID": "247bc4bf3525dfa800cde3de71040f3b3188488e652b9582736ef2ef30229df0",
	                    "Gateway": "192.168.76.1",
	                    "IPAddress": "192.168.76.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "test-preload-899756",
	                        "b98aed3a0a97"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p test-preload-899756 -n test-preload-899756
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p test-preload-899756 -n test-preload-899756: exit status 6 (309.519053ms)

                                                
                                                
-- stdout --
	Running
	WARNING: Your kubectl is pointing to stale minikube-vm.
	To fix the kubectl context, run `minikube update-context`

                                                
                                                
-- /stdout --
** stderr ** 
	E0916 11:17:35.873958 1516975 status.go:417] kubeconfig endpoint: get endpoint: "test-preload-899756" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig

                                                
                                                
** /stderr **
helpers_test.go:239: status error: exit status 6 (may be ok)
helpers_test.go:241: "test-preload-899756" host is not running, skipping log retrieval (state="Running\nWARNING: Your kubectl is pointing to stale minikube-vm.\nTo fix the kubectl context, run `minikube update-context`")
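As the status warning suggests, a stale kubeconfig endpoint of this kind can normally be repaired in place rather than by deleting the profile (illustrative; the harness proceeds with cleanup below):

	minikube update-context -p test-preload-899756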
helpers_test.go:175: Cleaning up "test-preload-899756" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p test-preload-899756
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p test-preload-899756: (1.900783411s)
--- FAIL: TestPreload (23.63s)

                                                
                                    
x
+
TestKubernetesUpgrade (358.72s)

                                                
                                                
=== RUN   TestKubernetesUpgrade
=== PAUSE TestKubernetesUpgrade

                                                
                                                

                                                
                                                
=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:222: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-485103 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
E0916 11:21:56.408862 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:222: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-485103 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (1m13.694313395s)
version_upgrade_test.go:227: (dbg) Run:  out/minikube-linux-arm64 stop -p kubernetes-upgrade-485103
version_upgrade_test.go:227: (dbg) Done: out/minikube-linux-arm64 stop -p kubernetes-upgrade-485103: (1.231208934s)
version_upgrade_test.go:232: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-485103 status --format={{.Host}}
version_upgrade_test.go:232: (dbg) Non-zero exit: out/minikube-linux-arm64 -p kubernetes-upgrade-485103 status --format={{.Host}}: exit status 7 (69.316277ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
version_upgrade_test.go:234: status error: exit status 7 (may be ok)
version_upgrade_test.go:243: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-485103 --memory=2200 --kubernetes-version=v1.31.1 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
E0916 11:23:53.341345 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:243: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-485103 --memory=2200 --kubernetes-version=v1.31.1 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (4m38.27668487s)
version_upgrade_test.go:248: (dbg) Run:  kubectl --context kubernetes-upgrade-485103 version --output=json
version_upgrade_test.go:248: (dbg) Non-zero exit: kubectl --context kubernetes-upgrade-485103 version --output=json: fork/exec /usr/local/bin/kubectl: exec format error (2.085732ms)
version_upgrade_test.go:250: error running kubectl: fork/exec /usr/local/bin/kubectl: exec format error
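The "exec format error" means the kernel refused to execute the kubectl binary at all; on this aarch64 runner that almost always indicates a binary built for a different architecture was installed at /usr/local/bin/kubectl. A quick check (hypothetical triage, not part of the test run):

	uname -m                      # expect: aarch64 on this runner
	file /usr/local/bin/kubectl   # an arm64 build reports something like "ELF 64-bit LSB executable, ARM aarch64"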
panic.go:629: *** TestKubernetesUpgrade FAILED at 2024-09-16 11:27:23.707584449 +0000 UTC m=+3156.441678280
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestKubernetesUpgrade]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect kubernetes-upgrade-485103
helpers_test.go:235: (dbg) docker inspect kubernetes-upgrade-485103:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31",
	        "Created": "2024-09-16T11:21:40.667809706Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1539355,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:22:45.954655384Z",
	            "FinishedAt": "2024-09-16T11:22:44.628247927Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31/hostname",
	        "HostsPath": "/var/lib/docker/containers/36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31/hosts",
	        "LogPath": "/var/lib/docker/containers/36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31/36b0705d261b663b7b97b366f614ced29ad446cedf32d931b23cd6bbdf1dee31-json.log",
	        "Name": "/kubernetes-upgrade-485103",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "kubernetes-upgrade-485103:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "kubernetes-upgrade-485103",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/3809f3fb905e63829e9c83c4e99fbdcd42bc81450bb71ef9a5a68b1c373f49aa-init/diff:/var/lib/docker/overlay2/1502e35c27c097cfc834a7c6caeee5bb9f58b41375577f491b73f55bc131cbae/diff",
	                "MergedDir": "/var/lib/docker/overlay2/3809f3fb905e63829e9c83c4e99fbdcd42bc81450bb71ef9a5a68b1c373f49aa/merged",
	                "UpperDir": "/var/lib/docker/overlay2/3809f3fb905e63829e9c83c4e99fbdcd42bc81450bb71ef9a5a68b1c373f49aa/diff",
	                "WorkDir": "/var/lib/docker/overlay2/3809f3fb905e63829e9c83c4e99fbdcd42bc81450bb71ef9a5a68b1c373f49aa/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "kubernetes-upgrade-485103",
	                "Source": "/var/lib/docker/volumes/kubernetes-upgrade-485103/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "kubernetes-upgrade-485103",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "kubernetes-upgrade-485103",
	                "name.minikube.sigs.k8s.io": "kubernetes-upgrade-485103",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "aa5b9cff4ce37788db4038763a4efb66cbe9d9924232bcceae55ccd2bde0da67",
	            "SandboxKey": "/var/run/docker/netns/aa5b9cff4ce3",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34833"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34834"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34837"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34835"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34836"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "kubernetes-upgrade-485103": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.76.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:4c:02",
	                    "DriverOpts": null,
	                    "NetworkID": "2e97240047442c58cdaf0db54bbb7c1ce00d7ae1512db53285f7fbced57d77e8",
	                    "EndpointID": "35cc68a6494e337d229aac16e9bb39c1520ffb3f2798246fec9b612da95768a8",
	                    "Gateway": "192.168.76.1",
	                    "IPAddress": "192.168.76.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "kubernetes-upgrade-485103",
	                        "36b0705d261b"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p kubernetes-upgrade-485103 -n kubernetes-upgrade-485103
helpers_test.go:244: <<< TestKubernetesUpgrade FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestKubernetesUpgrade]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-485103 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p kubernetes-upgrade-485103 logs -n 25: (2.012495865s)
helpers_test.go:252: TestKubernetesUpgrade logs: 
-- stdout --
	
	==> Audit <==
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| Command |                         Args                         |         Profile          |  User   | Version |     Start Time      |      End Time       |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status kubelet --all                       |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat kubelet                                |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | journalctl -xeu kubelet --all                        |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/kubernetes/kubelet.conf                         |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /var/lib/kubelet/config.yaml                         |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status docker --all                        |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat docker                                 |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/docker/daemon.json                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo docker                         | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | system info                                          |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status cri-docker                          |                          |         |         |                     |                     |
	|         | --all --full --no-pager                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat cri-docker                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/systemd/system/cri-docker.service.d/10-cni.conf |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /usr/lib/systemd/system/cri-docker.service           |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | cri-dockerd --version                                |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status containerd                          |                          |         |         |                     |                     |
	|         | --all --full --no-pager                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat containerd                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /lib/systemd/system/containerd.service               |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo cat                            | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/containerd/config.toml                          |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | containerd config dump                               |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl status crio --all                          |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo                                | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | systemctl cat crio --no-pager                        |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo find                           | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | /etc/crio -type f -exec sh -c                        |                          |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                 |                          |         |         |                     |                     |
	| ssh     | -p cilium-141252 sudo crio                           | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | config                                               |                          |         |         |                     |                     |
	| delete  | -p cilium-141252                                     | cilium-141252            | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC | 16 Sep 24 11:27 UTC |
	| start   | -p force-systemd-env-541584                          | force-systemd-env-541584 | jenkins | v1.34.0 | 16 Sep 24 11:27 UTC |                     |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --alsologtostderr                                    |                          |         |         |                     |                     |
	|         | -v=5 --driver=docker                                 |                          |         |         |                     |                     |
	|         | --container-runtime=crio                             |                          |         |         |                     |                     |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:27:04
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:27:04.776201 1560431 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:27:04.776382 1560431 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:27:04.776412 1560431 out.go:358] Setting ErrFile to fd 2...
	I0916 11:27:04.776436 1560431 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:27:04.776716 1560431 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:27:04.777168 1560431 out.go:352] Setting JSON to false
	I0916 11:27:04.778201 1560431 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":40170,"bootTime":1726445855,"procs":187,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 11:27:04.778346 1560431 start.go:139] virtualization:  
	I0916 11:27:04.781758 1560431 out.go:177] * [force-systemd-env-541584] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:27:04.785201 1560431 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:27:04.785327 1560431 notify.go:220] Checking for updates...
	I0916 11:27:04.790594 1560431 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:27:04.793233 1560431 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:27:04.795965 1560431 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 11:27:04.798573 1560431 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:27:04.801433 1560431 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=true
	I0916 11:27:04.804490 1560431 config.go:182] Loaded profile config "kubernetes-upgrade-485103": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:27:04.804619 1560431 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:27:04.854870 1560431 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:27:04.855011 1560431 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:27:04.928634 1560431 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:53 SystemTime:2024-09-16 11:27:04.918935954 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:27:04.928775 1560431 docker.go:318] overlay module found
	I0916 11:27:04.931544 1560431 out.go:177] * Using the docker driver based on user configuration
	I0916 11:27:04.934110 1560431 start.go:297] selected driver: docker
	I0916 11:27:04.934129 1560431 start.go:901] validating driver "docker" against <nil>
	I0916 11:27:04.934144 1560431 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:27:04.934796 1560431 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:27:04.996718 1560431 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:34 OomKillDisable:true NGoroutines:53 SystemTime:2024-09-16 11:27:04.987297716 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:27:04.996935 1560431 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:27:04.997161 1560431 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 11:27:04.999849 1560431 out.go:177] * Using Docker driver with root privileges
	I0916 11:27:05.006094 1560431 cni.go:84] Creating CNI manager for ""
	I0916 11:27:05.006203 1560431 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:27:05.006213 1560431 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:27:05.006348 1560431 start.go:340] cluster config:
	{Name:force-systemd-env-541584 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-541584 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
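The kindnet recommendation above is minikube's default CNI choice for the docker driver paired with the cri-o runtime, and the generated cluster config reflects exactly that (NetworkPlugin:cni, no explicit CNI value). A minimal sketch of requesting the same combination explicitly from the command line, with a hypothetical profile name (flag names as documented by minikube; this is not necessarily the invocation the test used):

    # Make the driver/runtime/CNI selection above explicit instead of inferred.
    minikube start -p force-systemd-env-demo \
      --driver=docker \
      --container-runtime=crio \
      --cni=kindnet \
      --memory=2048 --cpus=2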
	I0916 11:27:05.011323 1560431 out.go:177] * Starting "force-systemd-env-541584" primary control-plane node in "force-systemd-env-541584" cluster
	I0916 11:27:05.014352 1560431 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 11:27:05.017681 1560431 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:27:05.020537 1560431 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:27:05.020799 1560431 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:27:05.020842 1560431 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 11:27:05.020850 1560431 cache.go:56] Caching tarball of preloaded images
	I0916 11:27:05.020928 1560431 preload.go:172] Found /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 in cache, skipping download
	I0916 11:27:05.020945 1560431 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 11:27:05.021068 1560431 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/force-systemd-env-541584/config.json ...
	I0916 11:27:05.021093 1560431 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/force-systemd-env-541584/config.json: {Name:mk8abf5a7598fc58537819f3dea2b7f736a3b67e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 11:27:05.046939 1560431 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:27:05.046964 1560431 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:27:05.047049 1560431 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:27:05.047079 1560431 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:27:05.047085 1560431 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:27:05.047095 1560431 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:27:05.047100 1560431 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:27:05.048601 1560431 image.go:273] response: 
	I0916 11:27:05.167263 1560431 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:27:05.167317 1560431 cache.go:194] Successfully downloaded all kic artifacts
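The "wrong architecture" warning above means the kicbase image already present in the local docker daemon was built for a different platform than this arm64 host, so minikube falls back to loading the pre-downloaded tarball from its cache. One way to check such a mismatch by hand (tag taken from the log; the @sha256 digest suffix is omitted for brevity):

    # Compare the cached image's platform against the host.
    docker image inspect gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644 \
      --format '{{.Os}}/{{.Architecture}}'
    uname -m   # arm64 hosts report aarch64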
	I0916 11:27:05.167349 1560431 start.go:360] acquireMachinesLock for force-systemd-env-541584: {Name:mkedad62461b2d854bb5c587493997396e879bdc Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:27:05.167472 1560431 start.go:364] duration metric: took 99.001µs to acquireMachinesLock for "force-systemd-env-541584"
	I0916 11:27:05.167506 1560431 start.go:93] Provisioning new machine with config: &{Name:force-systemd-env-541584 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-541584 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:27:05.167598 1560431 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:27:01.255444 1539150 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:27:01.255907 1539150 api_server.go:269] stopped: https://192.168.76.2:8443/healthz: Get "https://192.168.76.2:8443/healthz": dial tcp 192.168.76.2:8443: connect: connection refused
	I0916 11:27:01.255958 1539150 cri.go:54] listing CRI containers in root : {State:all Name:kube-apiserver Namespaces:[]}
	I0916 11:27:01.256017 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0916 11:27:01.305928 1539150 cri.go:89] found id: "dfc01a57cd297e57aeaef492377cf4bfc0dea52cd4fea17e12ce9dff2425312b"
	I0916 11:27:01.305946 1539150 cri.go:89] found id: ""
	I0916 11:27:01.305954 1539150 logs.go:276] 1 containers: [dfc01a57cd297e57aeaef492377cf4bfc0dea52cd4fea17e12ce9dff2425312b]
	I0916 11:27:01.306015 1539150 ssh_runner.go:195] Run: which crictl
	I0916 11:27:01.310260 1539150 cri.go:54] listing CRI containers in root : {State:all Name:etcd Namespaces:[]}
	I0916 11:27:01.310337 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0916 11:27:01.369605 1539150 cri.go:89] found id: ""
	I0916 11:27:01.369632 1539150 logs.go:276] 0 containers: []
	W0916 11:27:01.369642 1539150 logs.go:278] No container was found matching "etcd"
	I0916 11:27:01.369648 1539150 cri.go:54] listing CRI containers in root : {State:all Name:coredns Namespaces:[]}
	I0916 11:27:01.369710 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0916 11:27:01.418328 1539150 cri.go:89] found id: ""
	I0916 11:27:01.418355 1539150 logs.go:276] 0 containers: []
	W0916 11:27:01.418365 1539150 logs.go:278] No container was found matching "coredns"
	I0916 11:27:01.418372 1539150 cri.go:54] listing CRI containers in root : {State:all Name:kube-scheduler Namespaces:[]}
	I0916 11:27:01.418433 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0916 11:27:01.472482 1539150 cri.go:89] found id: "f194136d692cc8268d7cf990eadd990e724ad57e60645bc38a967cfe2e0938fa"
	I0916 11:27:01.472502 1539150 cri.go:89] found id: ""
	I0916 11:27:01.472510 1539150 logs.go:276] 1 containers: [f194136d692cc8268d7cf990eadd990e724ad57e60645bc38a967cfe2e0938fa]
	I0916 11:27:01.472573 1539150 ssh_runner.go:195] Run: which crictl
	I0916 11:27:01.476630 1539150 cri.go:54] listing CRI containers in root : {State:all Name:kube-proxy Namespaces:[]}
	I0916 11:27:01.476740 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0916 11:27:01.538919 1539150 cri.go:89] found id: ""
	I0916 11:27:01.538942 1539150 logs.go:276] 0 containers: []
	W0916 11:27:01.538950 1539150 logs.go:278] No container was found matching "kube-proxy"
	I0916 11:27:01.538957 1539150 cri.go:54] listing CRI containers in root : {State:all Name:kube-controller-manager Namespaces:[]}
	I0916 11:27:01.539018 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0916 11:27:01.597231 1539150 cri.go:89] found id: "abdb3268b75f0e67e809ec51713a535dd48d37e0b165034232f066bbda65e0f2"
	I0916 11:27:01.597249 1539150 cri.go:89] found id: ""
	I0916 11:27:01.597261 1539150 logs.go:276] 1 containers: [abdb3268b75f0e67e809ec51713a535dd48d37e0b165034232f066bbda65e0f2]
	I0916 11:27:01.597318 1539150 ssh_runner.go:195] Run: which crictl
	I0916 11:27:01.601493 1539150 cri.go:54] listing CRI containers in root : {State:all Name:kindnet Namespaces:[]}
	I0916 11:27:01.601567 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0916 11:27:01.655110 1539150 cri.go:89] found id: ""
	I0916 11:27:01.655131 1539150 logs.go:276] 0 containers: []
	W0916 11:27:01.655141 1539150 logs.go:278] No container was found matching "kindnet"
	I0916 11:27:01.655148 1539150 cri.go:54] listing CRI containers in root : {State:all Name:storage-provisioner Namespaces:[]}
	I0916 11:27:01.655208 1539150 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=storage-provisioner
	I0916 11:27:01.711036 1539150 cri.go:89] found id: ""
	I0916 11:27:01.711060 1539150 logs.go:276] 0 containers: []
	W0916 11:27:01.711070 1539150 logs.go:278] No container was found matching "storage-provisioner"
	I0916 11:27:01.711080 1539150 logs.go:123] Gathering logs for describe nodes ...
	I0916 11:27:01.711093 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	W0916 11:27:01.799515 1539150 logs.go:130] failed describe nodes: command: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	 output: 
	** stderr ** 
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	
	** /stderr **
	I0916 11:27:01.799662 1539150 logs.go:123] Gathering logs for kube-apiserver [dfc01a57cd297e57aeaef492377cf4bfc0dea52cd4fea17e12ce9dff2425312b] ...
	I0916 11:27:01.799684 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 dfc01a57cd297e57aeaef492377cf4bfc0dea52cd4fea17e12ce9dff2425312b"
	I0916 11:27:01.855507 1539150 logs.go:123] Gathering logs for kube-scheduler [f194136d692cc8268d7cf990eadd990e724ad57e60645bc38a967cfe2e0938fa] ...
	I0916 11:27:01.855577 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 f194136d692cc8268d7cf990eadd990e724ad57e60645bc38a967cfe2e0938fa"
	I0916 11:27:01.980805 1539150 logs.go:123] Gathering logs for kube-controller-manager [abdb3268b75f0e67e809ec51713a535dd48d37e0b165034232f066bbda65e0f2] ...
	I0916 11:27:01.980836 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 abdb3268b75f0e67e809ec51713a535dd48d37e0b165034232f066bbda65e0f2"
	I0916 11:27:02.033074 1539150 logs.go:123] Gathering logs for CRI-O ...
	I0916 11:27:02.033102 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u crio -n 400"
	I0916 11:27:02.083746 1539150 logs.go:123] Gathering logs for container status ...
	I0916 11:27:02.083778 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0916 11:27:02.164938 1539150 logs.go:123] Gathering logs for kubelet ...
	I0916 11:27:02.164968 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	I0916 11:27:02.304471 1539150 logs.go:123] Gathering logs for dmesg ...
	I0916 11:27:02.304509 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
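Every failure in the gathering pass above traces back to the same root cause: the apiserver on this node is not serving, so anything that needs localhost:8443 or 192.168.76.2:8443 is refused. The probe minikube keeps issuing can be reproduced directly (IP and port from the log):

    # Same health probe as api_server.go; "connection refused" means the
    # apiserver container is not (yet) listening.
    curl -k --max-time 2 https://192.168.76.2:8443/healthz
    sudo crictl ps -a --name kube-apiserver   # inspect the container state directly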
	I0916 11:27:04.824298 1539150 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:27:04.824639 1539150 api_server.go:269] stopped: https://192.168.76.2:8443/healthz: Get "https://192.168.76.2:8443/healthz": dial tcp 192.168.76.2:8443: connect: connection refused
	I0916 11:27:04.824717 1539150 kubeadm.go:597] duration metric: took 4m7.229969603s to restartPrimaryControlPlane
	W0916 11:27:04.824767 1539150 out.go:270] ! Unable to restart control-plane node(s), will reset cluster: <no value>
	I0916 11:27:04.824793 1539150 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm reset --cri-socket /var/run/crio/crio.sock --force"
	I0916 11:27:05.752127 1539150 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:27:05.764650 1539150 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:27:05.776029 1539150 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:27:05.776101 1539150 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:27:05.800657 1539150 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:27:05.800708 1539150 kubeadm.go:157] found existing configuration files:
	
	I0916 11:27:05.800763 1539150 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 11:27:05.817349 1539150 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:27:05.817418 1539150 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:27:05.827555 1539150 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 11:27:05.839500 1539150 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:27:05.839569 1539150 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:27:05.854966 1539150 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 11:27:05.864419 1539150 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:27:05.864484 1539150 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:27:05.877216 1539150 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 11:27:05.889714 1539150 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:27:05.889804 1539150 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
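The four grep-then-rm sequences above are one pass of the same stale-config cleanup applied to each kubeconfig file: keep the file only if it already points at the expected control-plane endpoint, otherwise remove it so kubeadm can regenerate it. Roughly equivalent shell, as a sketch:

    # Per-file stale kubeconfig cleanup, equivalent to the log lines above.
    for f in admin kubelet controller-manager scheduler; do
      sudo grep -q 'https://control-plane.minikube.internal:8443' "/etc/kubernetes/$f.conf" \
        || sudo rm -f "/etc/kubernetes/$f.conf"
    done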
	I0916 11:27:05.898943 1539150 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:27:05.953323 1539150 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:27:05.953643 1539150 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:27:06.000127 1539150 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:27:06.000247 1539150 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:27:06.000311 1539150 kubeadm.go:310] OS: Linux
	I0916 11:27:06.000412 1539150 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:27:06.000549 1539150 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:27:06.000610 1539150 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:27:06.000664 1539150 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:27:06.000730 1539150 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:27:06.000781 1539150 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:27:06.000829 1539150 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:27:06.000879 1539150 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:27:06.000943 1539150 kubeadm.go:310] CGROUPS_BLKIO: enabled
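The CGROUPS_* lines above are kubeadm's preflight system verification enumerating the cgroup controllers the kubelet depends on. The same information can be read off the node manually; the layout differs between cgroup v1 and v2:

    # cgroup v1: one directory per controller; cgroup v2: a single unified list.
    ls /sys/fs/cgroup
    cat /sys/fs/cgroup/cgroup.controllers 2>/dev/null   # present only on cgroup v2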
	I0916 11:27:06.125464 1539150 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:27:06.125580 1539150 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:27:06.125683 1539150 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:27:06.134441 1539150 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:27:06.139429 1539150 out.go:235]   - Generating certificates and keys ...
	I0916 11:27:06.139543 1539150 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:27:06.139621 1539150 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:27:06.139705 1539150 kubeadm.go:310] [certs] Using existing apiserver-kubelet-client certificate and key on disk
	I0916 11:27:06.139768 1539150 kubeadm.go:310] [certs] Using existing front-proxy-ca certificate authority
	I0916 11:27:06.139847 1539150 kubeadm.go:310] [certs] Using existing front-proxy-client certificate and key on disk
	I0916 11:27:06.139907 1539150 kubeadm.go:310] [certs] Using existing etcd/ca certificate authority
	I0916 11:27:06.139973 1539150 kubeadm.go:310] [certs] Using existing etcd/server certificate and key on disk
	I0916 11:27:06.140038 1539150 kubeadm.go:310] [certs] Using existing etcd/peer certificate and key on disk
	I0916 11:27:06.140119 1539150 kubeadm.go:310] [certs] Using existing etcd/healthcheck-client certificate and key on disk
	I0916 11:27:06.140214 1539150 kubeadm.go:310] [certs] Using existing apiserver-etcd-client certificate and key on disk
	I0916 11:27:06.140255 1539150 kubeadm.go:310] [certs] Using the existing "sa" key
	I0916 11:27:06.140313 1539150 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:27:06.928802 1539150 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:27:07.525003 1539150 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:27:07.883609 1539150 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:27:08.267840 1539150 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:27:08.568957 1539150 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:27:08.570595 1539150 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:27:08.575170 1539150 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
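At this point kubeadm has written the kubeconfig files and the etcd manifest, and is about to lay down the remaining control-plane static pod manifests, which the kubelet picks up below at the "Booting up control plane" phase. The manifests land in a fixed directory and can be listed directly on the node:

    # Static pod manifests written by 'kubeadm init' for the kubelet to launch.
    sudo ls -l /etc/kubernetes/manifests
    # expected: etcd.yaml, kube-apiserver.yaml, kube-controller-manager.yaml, kube-scheduler.yaml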
	I0916 11:27:05.170885 1560431 out.go:235] * Creating docker container (CPUs=2, Memory=2048MB) ...
	I0916 11:27:05.171187 1560431 start.go:159] libmachine.API.Create for "force-systemd-env-541584" (driver="docker")
	I0916 11:27:05.171225 1560431 client.go:168] LocalClient.Create starting
	I0916 11:27:05.171311 1560431 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem
	I0916 11:27:05.171357 1560431 main.go:141] libmachine: Decoding PEM data...
	I0916 11:27:05.171375 1560431 main.go:141] libmachine: Parsing certificate...
	I0916 11:27:05.171432 1560431 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem
	I0916 11:27:05.171453 1560431 main.go:141] libmachine: Decoding PEM data...
	I0916 11:27:05.171477 1560431 main.go:141] libmachine: Parsing certificate...
	I0916 11:27:05.171915 1560431 cli_runner.go:164] Run: docker network inspect force-systemd-env-541584 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:27:05.189019 1560431 cli_runner.go:211] docker network inspect force-systemd-env-541584 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:27:05.189104 1560431 network_create.go:284] running [docker network inspect force-systemd-env-541584] to gather additional debugging logs...
	I0916 11:27:05.189126 1560431 cli_runner.go:164] Run: docker network inspect force-systemd-env-541584
	W0916 11:27:05.204450 1560431 cli_runner.go:211] docker network inspect force-systemd-env-541584 returned with exit code 1
	I0916 11:27:05.204480 1560431 network_create.go:287] error running [docker network inspect force-systemd-env-541584]: docker network inspect force-systemd-env-541584: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network force-systemd-env-541584 not found
	I0916 11:27:05.204494 1560431 network_create.go:289] output of [docker network inspect force-systemd-env-541584]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network force-systemd-env-541584 not found
	
	** /stderr **
	I0916 11:27:05.204593 1560431 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:27:05.221152 1560431 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-a49e1846148d IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:d3:9d:ef:74} reservation:<nil>}
	I0916 11:27:05.221613 1560431 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-2e9863632116 IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:77:c8:06:b6} reservation:<nil>}
	I0916 11:27:05.222096 1560431 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-76703dbf7b5c IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:29:f7:34:a1} reservation:<nil>}
	I0916 11:27:05.222430 1560431 network.go:211] skipping subnet 192.168.76.0/24 that is taken: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName:br-2e9724004744 IfaceIPv4:192.168.76.1 IfaceMTU:1500 IfaceMAC:02:42:07:d5:f8:23} reservation:<nil>}
	I0916 11:27:05.222919 1560431 network.go:206] using free private subnet 192.168.85.0/24: &{IP:192.168.85.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.85.0/24 Gateway:192.168.85.1 ClientMin:192.168.85.2 ClientMax:192.168.85.254 Broadcast:192.168.85.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400185bb60}
	I0916 11:27:05.223005 1560431 network_create.go:124] attempt to create docker network force-systemd-env-541584 192.168.85.0/24 with gateway 192.168.85.1 and MTU of 1500 ...
	I0916 11:27:05.223094 1560431 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.85.0/24 --gateway=192.168.85.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=force-systemd-env-541584 force-systemd-env-541584
	I0916 11:27:05.300049 1560431 network_create.go:108] docker network force-systemd-env-541584 192.168.85.0/24 created
	I0916 11:27:05.300080 1560431 kic.go:121] calculated static IP "192.168.85.2" for the "force-systemd-env-541584" container
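The subnet scan above walks private /24 candidates (192.168.49.0, .58.0, .67.0, .76.0, ...) and takes the first one no existing bridge network claims; the gateway then gets .1 and the single node .2. The subnets minikube had to skip can be listed with:

    # Subnets already held by existing docker networks (the ones skipped above).
    for n in $(docker network ls --format '{{.Name}}'); do
      docker network inspect "$n" \
        --format '{{.Name}}: {{range .IPAM.Config}}{{.Subnet}}{{end}}'
    done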
	I0916 11:27:05.300161 1560431 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:27:05.322872 1560431 cli_runner.go:164] Run: docker volume create force-systemd-env-541584 --label name.minikube.sigs.k8s.io=force-systemd-env-541584 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:27:05.341004 1560431 oci.go:103] Successfully created a docker volume force-systemd-env-541584
	I0916 11:27:05.341094 1560431 cli_runner.go:164] Run: docker run --rm --name force-systemd-env-541584-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=force-systemd-env-541584 --entrypoint /usr/bin/test -v force-systemd-env-541584:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:27:06.057749 1560431 oci.go:107] Successfully prepared a docker volume force-systemd-env-541584
	I0916 11:27:06.057805 1560431 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:27:06.057826 1560431 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:27:06.057910 1560431 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v force-systemd-env-541584:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
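The docker run above is how minikube populates the machine volume: it mounts the lz4 preload tarball read-only into a throwaway kicbase container and runs tar as the entrypoint, so the host never needs lz4 installed. The same pattern in isolation, as a sketch (volume and tarball names hypothetical; the image must ship tar and lz4, as the kicbase image does):

    docker volume create demo-data
    docker run --rm \
      -v "$PWD/images.tar.lz4:/preloaded.tar:ro" \
      -v demo-data:/extractDir \
      --entrypoint /usr/bin/tar \
      gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644 \
      -I lz4 -xf /preloaded.tar -C /extractDir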
	I0916 11:27:08.587785 1539150 out.go:235]   - Booting up control plane ...
	I0916 11:27:08.587899 1539150 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:27:08.587975 1539150 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:27:08.588041 1539150 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:27:08.591330 1539150 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:27:08.600135 1539150 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:27:08.600205 1539150 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:27:08.781273 1539150 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:27:08.781393 1539150 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:27:10.699352 1560431 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4:/preloaded.tar:ro -v force-systemd-env-541584:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.641404113s)
	I0916 11:27:10.699382 1560431 kic.go:203] duration metric: took 4.641552614s to extract preloaded images to volume ...
	W0916 11:27:10.699527 1560431 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:27:10.699636 1560431 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:27:10.780988 1560431 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname force-systemd-env-541584 --name force-systemd-env-541584 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=force-systemd-env-541584 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=force-systemd-env-541584 --network force-systemd-env-541584 --ip 192.168.85.2 --volume force-systemd-env-541584:/var --security-opt apparmor=unconfined --memory=2048mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:27:11.203535 1560431 cli_runner.go:164] Run: docker container inspect force-systemd-env-541584 --format={{.State.Running}}
	I0916 11:27:11.230634 1560431 cli_runner.go:164] Run: docker container inspect force-systemd-env-541584 --format={{.State.Status}}
	I0916 11:27:11.256915 1560431 cli_runner.go:164] Run: docker exec force-systemd-env-541584 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:27:11.349696 1560431 oci.go:144] the created container "force-systemd-env-541584" has a running status.
	I0916 11:27:11.349729 1560431 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa...
	I0916 11:27:11.794121 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:27:11.794171 1560431 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:27:11.819525 1560431 cli_runner.go:164] Run: docker container inspect force-systemd-env-541584 --format={{.State.Status}}
	I0916 11:27:11.856933 1560431 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:27:11.856961 1560431 kic_runner.go:114] Args: [docker exec --privileged force-systemd-env-541584 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:27:11.954755 1560431 cli_runner.go:164] Run: docker container inspect force-systemd-env-541584 --format={{.State.Status}}
	I0916 11:27:11.987578 1560431 machine.go:93] provisionDockerMachine start ...
	I0916 11:27:11.987674 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:12.010154 1560431 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:12.010504 1560431 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34863 <nil> <nil>}
	I0916 11:27:12.010516 1560431 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:27:12.011222 1560431 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
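The handshake EOF above is expected on first contact: sshd inside the freshly started container is still coming up, and minikube simply retries until the forwarded port answers (the hostname command succeeds a few seconds later). The retry idiom, sketched with the port and key from this run (MINIKUBE_HOME as set at the top of this session):

    # Poll the forwarded SSH port until sshd inside the container accepts a session.
    until ssh -o StrictHostKeyChecking=no -o ConnectTimeout=2 \
          -i "$MINIKUBE_HOME/machines/force-systemd-env-541584/id_rsa" \
          -p 34863 docker@127.0.0.1 true; do
      sleep 1
    done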
	I0916 11:27:12.287232 1539150 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 3.506156207s
	I0916 11:27:12.287321 1539150 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:27:15.168663 1560431 main.go:141] libmachine: SSH cmd err, output: <nil>: force-systemd-env-541584
	
	I0916 11:27:15.168712 1560431 ubuntu.go:169] provisioning hostname "force-systemd-env-541584"
	I0916 11:27:15.168798 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:15.201565 1560431 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:15.201834 1560431 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34863 <nil> <nil>}
	I0916 11:27:15.201854 1560431 main.go:141] libmachine: About to run SSH command:
	sudo hostname force-systemd-env-541584 && echo "force-systemd-env-541584" | sudo tee /etc/hostname
	I0916 11:27:15.377621 1560431 main.go:141] libmachine: SSH cmd err, output: <nil>: force-systemd-env-541584
	
	I0916 11:27:15.377798 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:15.406199 1560431 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:15.406457 1560431 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34863 <nil> <nil>}
	I0916 11:27:15.406476 1560431 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sforce-systemd-env-541584' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 force-systemd-env-541584/g' /etc/hosts;
				else 
					echo '127.0.1.1 force-systemd-env-541584' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:27:15.565288 1560431 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:27:15.565317 1560431 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-1378450/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-1378450/.minikube}
	I0916 11:27:15.565337 1560431 ubuntu.go:177] setting up certificates
	I0916 11:27:15.565347 1560431 provision.go:84] configureAuth start
	I0916 11:27:15.565420 1560431 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" force-systemd-env-541584
	I0916 11:27:15.593664 1560431 provision.go:143] copyHostCerts
	I0916 11:27:15.593726 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:27:15.593762 1560431 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem, removing ...
	I0916 11:27:15.593769 1560431 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem
	I0916 11:27:15.593843 1560431 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.pem (1078 bytes)
	I0916 11:27:15.593914 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:27:15.593931 1560431 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem, removing ...
	I0916 11:27:15.593936 1560431 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem
	I0916 11:27:15.593960 1560431 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/cert.pem (1123 bytes)
	I0916 11:27:15.593998 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:27:15.594013 1560431 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem, removing ...
	I0916 11:27:15.594017 1560431 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem
	I0916 11:27:15.594041 1560431 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-1378450/.minikube/key.pem (1679 bytes)
	I0916 11:27:15.594088 1560431 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca-key.pem org=jenkins.force-systemd-env-541584 san=[127.0.0.1 192.168.85.2 force-systemd-env-541584 localhost minikube]
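The server certificate generated above carries the SAN list shown in the log (127.0.0.1, 192.168.85.2, the hostname, localhost, minikube), which is what lets clients reach the machine under any of those names. The SANs can be verified with openssl (path relative to MINIKUBE_HOME, per the log):

    # Show the subject alternative names baked into the generated server cert.
    openssl x509 -noout -text -in "$MINIKUBE_HOME/machines/server.pem" \
      | grep -A1 'Subject Alternative Name'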
	I0916 11:27:16.328165 1560431 provision.go:177] copyRemoteCerts
	I0916 11:27:16.328295 1560431 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:27:16.328376 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:16.346158 1560431 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34863 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa Username:docker}
	I0916 11:27:16.459463 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:27:16.459591 1560431 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0916 11:27:16.497072 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:27:16.497145 1560431 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server.pem --> /etc/docker/server.pem (1237 bytes)
	I0916 11:27:16.538072 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:27:16.538145 1560431 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:27:16.574012 1560431 provision.go:87] duration metric: took 1.008649954s to configureAuth
	I0916 11:27:16.574035 1560431 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:27:16.574230 1560431 config.go:182] Loaded profile config "force-systemd-env-541584": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:27:16.574339 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:16.600026 1560431 main.go:141] libmachine: Using SSH client type: native
	I0916 11:27:16.600294 1560431 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 34863 <nil> <nil>}
	I0916 11:27:16.600317 1560431 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /etc/sysconfig && printf %s "
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
	I0916 11:27:16.951962 1560431 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
	
	I0916 11:27:16.951997 1560431 machine.go:96] duration metric: took 4.964393446s to provisionDockerMachine
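The SSH command a few lines up writes a sysconfig drop-in that injects --insecure-registry for the service CIDR into cri-o, then restarts the daemon. Assuming the kicbase crio unit sources /etc/sysconfig/crio.minikube (not shown in this log), the effect can be confirmed inside the node with:

    # Confirm the override landed and cri-o restarted cleanly.
    cat /etc/sysconfig/crio.minikube
    systemctl show crio -p Environment
    systemctl is-active crio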
	I0916 11:27:16.952015 1560431 client.go:171] duration metric: took 11.780774087s to LocalClient.Create
	I0916 11:27:16.952030 1560431 start.go:167] duration metric: took 11.780850122s to libmachine.API.Create "force-systemd-env-541584"
	I0916 11:27:16.952045 1560431 start.go:293] postStartSetup for "force-systemd-env-541584" (driver="docker")
	I0916 11:27:16.952059 1560431 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:27:16.952130 1560431 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:27:16.952192 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:16.986229 1560431 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34863 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa Username:docker}
	I0916 11:27:17.106040 1560431 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:27:17.109790 1560431 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:27:17.109830 1560431 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:27:17.109846 1560431 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:27:17.109854 1560431 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:27:17.109868 1560431 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/addons for local assets ...
	I0916 11:27:17.109928 1560431 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-1378450/.minikube/files for local assets ...
	I0916 11:27:17.110022 1560431 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> 13838332.pem in /etc/ssl/certs
	I0916 11:27:17.110034 1560431 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem -> /etc/ssl/certs/13838332.pem
	I0916 11:27:17.110134 1560431 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:27:17.127429 1560431 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/ssl/certs/13838332.pem --> /etc/ssl/certs/13838332.pem (1708 bytes)
	I0916 11:27:17.170070 1560431 start.go:296] duration metric: took 218.008507ms for postStartSetup
	I0916 11:27:17.170503 1560431 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" force-systemd-env-541584
	I0916 11:27:17.202375 1560431 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/force-systemd-env-541584/config.json ...
	I0916 11:27:17.202659 1560431 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:27:17.202704 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:17.242260 1560431 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34863 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa Username:docker}
	I0916 11:27:17.353118 1560431 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:27:17.358226 1560431 start.go:128] duration metric: took 12.190612932s to createHost
	I0916 11:27:17.358250 1560431 start.go:83] releasing machines lock for "force-systemd-env-541584", held for 12.190763844s
	I0916 11:27:17.358331 1560431 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" force-systemd-env-541584
	I0916 11:27:17.382298 1560431 ssh_runner.go:195] Run: cat /version.json
	I0916 11:27:17.382353 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:17.382596 1560431 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:27:17.382668 1560431 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" force-systemd-env-541584
	I0916 11:27:17.418348 1560431 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34863 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa Username:docker}
	I0916 11:27:17.431187 1560431 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34863 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/force-systemd-env-541584/id_rsa Username:docker}
	I0916 11:27:17.684340 1560431 ssh_runner.go:195] Run: systemctl --version
	I0916 11:27:17.693126 1560431 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
	I0916 11:27:17.865622 1560431 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:27:17.870027 1560431 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:27:17.925218 1560431 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:27:17.925305 1560431 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:27:17.993100 1560431 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:27:17.993127 1560431 start.go:495] detecting cgroup driver to use...
	I0916 11:27:17.993144 1560431 start.go:499] using "systemd" cgroup driver as enforced via flags
	I0916 11:27:17.993200 1560431 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0916 11:27:18.033841 1560431 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0916 11:27:18.055308 1560431 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:27:18.055380 1560431 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:27:18.078738 1560431 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:27:18.109845 1560431 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:27:18.270204 1560431 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:27:18.406320 1560431 docker.go:233] disabling docker service ...
	I0916 11:27:18.406403 1560431 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:27:18.437217 1560431 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:27:18.462074 1560431 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:27:18.577433 1560431 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:27:18.682643 1560431 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:27:18.699569 1560431 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/crio/crio.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:27:18.718274 1560431 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.10" pause image...
	I0916 11:27:18.718348 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.10"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.729097 1560431 crio.go:70] configuring cri-o to use "systemd" as cgroup driver...
	I0916 11:27:18.729176 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "systemd"|' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.741460 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.753323 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.764585 1560431 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:27:18.774821 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *"net.ipv4.ip_unprivileged_port_start=.*"/d' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.785734 1560431 ssh_runner.go:195] Run: sh -c "sudo grep -q "^ *default_sysctls" /etc/crio/crio.conf.d/02-crio.conf || sudo sed -i '/conmon_cgroup = .*/a default_sysctls = \[\n\]' /etc/crio/crio.conf.d/02-crio.conf"
	I0916 11:27:18.803270 1560431 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^default_sysctls *= *\[|&\n  "net.ipv4.ip_unprivileged_port_start=0",|' /etc/crio/crio.conf.d/02-crio.conf"
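
Taken together, the sed edits above leave /etc/crio/crio.conf.d/02-crio.conf declaring the pause image, the systemd cgroup manager, a "pod" conmon cgroup, and a default_sysctls list that opens low ports to unprivileged pods. A quick way to confirm the end state (a sketch; the expected values are exactly what the sed commands above write):

  grep -E 'pause_image|cgroup_manager|conmon_cgroup|ip_unprivileged_port_start' \
    /etc/crio/crio.conf.d/02-crio.conf
  # expected output:
  #   pause_image = "registry.k8s.io/pause:3.10"
  #   cgroup_manager = "systemd"
  #   conmon_cgroup = "pod"
  #     "net.ipv4.ip_unprivileged_port_start=0",
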
	I0916 11:27:18.815138 1560431 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:27:18.823884 1560431 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:27:18.833912 1560431 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:27:18.950203 1560431 ssh_runner.go:195] Run: sudo systemctl restart crio
	I0916 11:27:19.130822 1560431 start.go:542] Will wait 60s for socket path /var/run/crio/crio.sock
	I0916 11:27:19.130919 1560431 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
	I0916 11:27:19.135339 1560431 start.go:563] Will wait 60s for crictl version
	I0916 11:27:19.135460 1560431 ssh_runner.go:195] Run: which crictl
	I0916 11:27:19.139545 1560431 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:27:19.192050 1560431 start.go:579] Version:  0.1.0
	RuntimeName:  cri-o
	RuntimeVersion:  1.24.6
	RuntimeApiVersion:  v1
	I0916 11:27:19.192213 1560431 ssh_runner.go:195] Run: crio --version
	I0916 11:27:19.252042 1560431 ssh_runner.go:195] Run: crio --version
	I0916 11:27:19.321025 1560431 out.go:177] * Preparing Kubernetes v1.31.1 on CRI-O 1.24.6 ...
	I0916 11:27:19.323823 1560431 cli_runner.go:164] Run: docker network inspect force-systemd-env-541584 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:27:19.345202 1560431 ssh_runner.go:195] Run: grep 192.168.85.1	host.minikube.internal$ /etc/hosts
	I0916 11:27:19.348969 1560431 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.85.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
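
The grep-and-rewrite idiom above (repeated later for control-plane.minikube.internal) drops any stale tab-separated entry for the name and re-appends the current IP, swapping the file in via a temp copy. As a reusable function (pin_host is a hypothetical name; the body mirrors the logged command):

  pin_host() {  # usage: pin_host IP NAME
    { grep -v "$(printf '\t')$2\$" /etc/hosts; printf '%s\t%s\n' "$1" "$2"; } > "/tmp/h.$$"
    sudo cp "/tmp/h.$$" /etc/hosts
  }
  pin_host 192.168.85.1 host.minikube.internal
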
	I0916 11:27:19.361560 1560431 kubeadm.go:883] updating cluster {Name:force-systemd-env-541584 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-541584 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:27:19.361680 1560431 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 11:27:19.361739 1560431 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:27:19.462658 1560431 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:27:19.462679 1560431 crio.go:433] Images already preloaded, skipping extraction
	I0916 11:27:19.462740 1560431 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:27:19.511198 1560431 crio.go:514] all images are preloaded for cri-o runtime.
	I0916 11:27:19.511274 1560431 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:27:19.511295 1560431 kubeadm.go:934] updating node { 192.168.85.2 8443 v1.31.1 crio true true} ...
	I0916 11:27:19.511409 1560431 kubeadm.go:946] kubelet [Unit]
	Wants=crio.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --enforce-node-allocatable= --hostname-override=force-systemd-env-541584 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.85.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-541584 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
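
The drop-in above clears the packaged ExecStart and re-launches kubelet from minikube's pinned binary with this node's name and IP; the scp steps below install it as /etc/systemd/system/kubelet.service.d/10-kubeadm.conf. To check that systemd actually picked up the override (a sketch, assuming a systemd new enough for `systemctl cat`):

  # Show the effective unit including drop-ins; the last ExecStart= wins.
  systemctl cat kubelet | grep -A1 '^ExecStart='
  sudo systemctl daemon-reload
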
	I0916 11:27:19.511539 1560431 ssh_runner.go:195] Run: crio config
	I0916 11:27:19.612058 1560431 cni.go:84] Creating CNI manager for ""
	I0916 11:27:19.612122 1560431 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:27:19.612145 1560431 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:27:19.612212 1560431 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.85.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:force-systemd-env-541584 NodeName:force-systemd-env-541584 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.85.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.85.2 CgroupDriver:systemd ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/crio/crio.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:27:19.612392 1560431 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.85.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/crio/crio.sock
	  name: "force-systemd-env-541584"
	  kubeletExtraArgs:
	    node-ip: 192.168.85.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.85.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: systemd
	containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
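
Note the config above is still kubeadm.k8s.io/v1beta3, which kubeadm v1.31 accepts but warns about (see the two W0916 "deprecated API spec" lines further down). A hedged way to vet it and preview the migration, assuming the file lands at /var/tmp/minikube/kubeadm.yaml.new as the scp step below shows:

  sudo kubeadm config validate --config /var/tmp/minikube/kubeadm.yaml.new
  sudo kubeadm config migrate --old-config /var/tmp/minikube/kubeadm.yaml.new \
    --new-config /tmp/kubeadm-v1beta4.yaml   # writes the v1beta4 equivalent
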
	I0916 11:27:19.612502 1560431 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:27:19.621783 1560431 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:27:19.621919 1560431 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:27:19.631167 1560431 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (374 bytes)
	I0916 11:27:19.651181 1560431 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:27:19.670330 1560431 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2161 bytes)
	I0916 11:27:19.689205 1560431 ssh_runner.go:195] Run: grep 192.168.85.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:27:19.693047 1560431 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.85.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:27:19.704260 1560431 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:27:20.291375 1539150 kubeadm.go:310] [api-check] The API server is healthy after 8.002289553s
	I0916 11:27:20.324421 1539150 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:27:20.344502 1539150 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:27:20.383154 1539150 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:27:20.383363 1539150 kubeadm.go:310] [mark-control-plane] Marking the node kubernetes-upgrade-485103 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:27:20.397691 1539150 kubeadm.go:310] [bootstrap-token] Using token: 969g82.77vwc0uwucmfqr16
	I0916 11:27:20.400566 1539150 out.go:235]   - Configuring RBAC rules ...
	I0916 11:27:20.400823 1539150 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:27:20.410857 1539150 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:27:20.424515 1539150 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:27:20.430785 1539150 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:27:20.435868 1539150 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:27:20.441043 1539150 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:27:20.699485 1539150 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:27:21.302694 1539150 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:27:21.700257 1539150 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:27:21.700279 1539150 kubeadm.go:310] 
	I0916 11:27:21.700341 1539150 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:27:21.700346 1539150 kubeadm.go:310] 
	I0916 11:27:21.700422 1539150 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:27:21.700427 1539150 kubeadm.go:310] 
	I0916 11:27:21.700452 1539150 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:27:21.700510 1539150 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:27:21.700559 1539150 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:27:21.700564 1539150 kubeadm.go:310] 
	I0916 11:27:21.700622 1539150 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:27:21.700628 1539150 kubeadm.go:310] 
	I0916 11:27:21.700696 1539150 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:27:21.700702 1539150 kubeadm.go:310] 
	I0916 11:27:21.700761 1539150 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:27:21.700837 1539150 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:27:21.700904 1539150 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:27:21.700908 1539150 kubeadm.go:310] 
	I0916 11:27:21.700991 1539150 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:27:21.701071 1539150 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:27:21.701075 1539150 kubeadm.go:310] 
	I0916 11:27:21.701163 1539150 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 969g82.77vwc0uwucmfqr16 \
	I0916 11:27:21.701264 1539150 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 \
	I0916 11:27:21.701286 1539150 kubeadm.go:310] 	--control-plane 
	I0916 11:27:21.701291 1539150 kubeadm.go:310] 
	I0916 11:27:21.701374 1539150 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:27:21.701379 1539150 kubeadm.go:310] 
	I0916 11:27:21.701463 1539150 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 969g82.77vwc0uwucmfqr16 \
	I0916 11:27:21.701566 1539150 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a39d4a6e06a2efc97f5d9564a89b81063790e757dde370e866d9dc4c2ed0ec07 
	I0916 11:27:21.708527 1539150 kubeadm.go:310] W0916 11:27:05.941039    5371 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:27:21.708841 1539150 kubeadm.go:310] W0916 11:27:05.942224    5371 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:27:21.709056 1539150 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:27:21.709243 1539150 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:27:21.709298 1539150 cni.go:84] Creating CNI manager for ""
	I0916 11:27:21.709319 1539150 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 11:27:21.714507 1539150 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:27:21.717389 1539150 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:27:21.721926 1539150 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:27:21.721943 1539150 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:27:21.756168 1539150 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:27:22.156063 1539150 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:27:22.156230 1539150 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:27:22.156321 1539150 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes kubernetes-upgrade-485103 minikube.k8s.io/updated_at=2024_09_16T11_27_22_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=kubernetes-upgrade-485103 minikube.k8s.io/primary=true
	I0916 11:27:22.510157 1539150 ops.go:34] apiserver oom_adj: -16
	I0916 11:27:22.510191 1539150 kubeadm.go:1113] duration metric: took 354.061401ms to wait for elevateKubeSystemPrivileges
	I0916 11:27:22.510205 1539150 kubeadm.go:394] duration metric: took 4m24.964899691s to StartCluster
	I0916 11:27:22.510222 1539150 settings.go:142] acquiring lock: {Name:mkc0474d366ad36774e47290c7932cc180a1b9f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:27:22.510282 1539150 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 11:27:22.510875 1539150 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/kubeconfig: {Name:mk806df66aa01ad28d0c99bc1a876b4310e8a3a0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:27:22.511064 1539150 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}
	I0916 11:27:22.511275 1539150 config.go:182] Loaded profile config "kubernetes-upgrade-485103": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:27:22.511323 1539150 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:27:22.511379 1539150 addons.go:69] Setting storage-provisioner=true in profile "kubernetes-upgrade-485103"
	I0916 11:27:22.511396 1539150 addons.go:234] Setting addon storage-provisioner=true in "kubernetes-upgrade-485103"
	W0916 11:27:22.511402 1539150 addons.go:243] addon storage-provisioner should already be in state true
	I0916 11:27:22.511424 1539150 host.go:66] Checking if "kubernetes-upgrade-485103" exists ...
	I0916 11:27:22.511886 1539150 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-485103 --format={{.State.Status}}
	I0916 11:27:22.512233 1539150 addons.go:69] Setting default-storageclass=true in profile "kubernetes-upgrade-485103"
	I0916 11:27:22.512260 1539150 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "kubernetes-upgrade-485103"
	I0916 11:27:22.512574 1539150 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-485103 --format={{.State.Status}}
	I0916 11:27:22.514654 1539150 out.go:177] * Verifying Kubernetes components...
	I0916 11:27:22.517589 1539150 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:27:22.563394 1539150 kapi.go:59] client config for kubernetes-upgrade-485103: &rest.Config{Host:"https://192.168.76.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kubernetes-upgrade-485103/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kubernetes-upgrade-485103/client.key", CAFile:"/home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:27:22.563676 1539150 addons.go:234] Setting addon default-storageclass=true in "kubernetes-upgrade-485103"
	W0916 11:27:22.563687 1539150 addons.go:243] addon default-storageclass should already be in state true
	I0916 11:27:22.563712 1539150 host.go:66] Checking if "kubernetes-upgrade-485103" exists ...
	I0916 11:27:22.564132 1539150 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-485103 --format={{.State.Status}}
	I0916 11:27:22.567228 1539150 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:27:22.569997 1539150 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:27:22.570023 1539150 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:27:22.570091 1539150 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" kubernetes-upgrade-485103
	I0916 11:27:22.606611 1539150 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:27:22.606639 1539150 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:27:22.606713 1539150 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" kubernetes-upgrade-485103
	I0916 11:27:22.608636 1539150 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34833 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/kubernetes-upgrade-485103/id_rsa Username:docker}
	I0916 11:27:22.630280 1539150 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34833 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/kubernetes-upgrade-485103/id_rsa Username:docker}
	I0916 11:27:22.856175 1539150 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:27:22.883971 1539150 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:27:22.897675 1539150 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:27:22.908355 1539150 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:27:22.908509 1539150 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:27:23.604097 1539150 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:27:23.604277 1539150 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:27:23.604254 1539150 api_server.go:72] duration metric: took 1.093150619s to wait for apiserver process to appear ...
	I0916 11:27:23.604626 1539150 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:27:23.604657 1539150 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:27:23.636442 1539150 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0916 11:27:23.638427 1539150 api_server.go:141] control plane version: v1.31.1
	I0916 11:27:23.638451 1539150 api_server.go:131] duration metric: took 33.805ms to wait for apiserver health ...
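
The healthz wait above is a plain HTTPS GET against the apiserver; an "ok" body with HTTP 200 is what flips the check to healthy. /healthz is readable even unauthenticated (via the default system:public-info-viewer binding), so a curl against the cluster CA reproduces it (a sketch; CA path taken from the client config logged above):

  curl --cacert /home/jenkins/minikube-integration/19651-1378450/.minikube/ca.crt \
    https://192.168.76.2:8443/healthz   # prints: ok
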
	I0916 11:27:23.638460 1539150 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:27:23.647290 1539150 system_pods.go:59] 5 kube-system pods found
	I0916 11:27:23.647392 1539150 system_pods.go:61] "etcd-kubernetes-upgrade-485103" [eb8cd98d-d17c-48f7-8728-c9b8cd651342] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:27:23.647418 1539150 system_pods.go:61] "kube-apiserver-kubernetes-upgrade-485103" [84f9dcf2-c899-433b-8dce-d1bec0416cb7] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:27:23.647462 1539150 system_pods.go:61] "kube-controller-manager-kubernetes-upgrade-485103" [59b71569-1a22-472d-904c-420702ffdd62] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:27:23.647489 1539150 system_pods.go:61] "kube-scheduler-kubernetes-upgrade-485103" [44d43d51-5981-4491-a386-32657d52d120] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:27:23.647510 1539150 system_pods.go:61] "storage-provisioner" [63c8cf13-98ad-4d62-9011-5b0d7a9de023] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling.)
	I0916 11:27:23.647532 1539150 system_pods.go:74] duration metric: took 9.065227ms to wait for pod list to return data ...
	I0916 11:27:23.647565 1539150 kubeadm.go:582] duration metric: took 1.136468548s to wait for: map[apiserver:true system_pods:true]
	I0916 11:27:23.647602 1539150 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:27:23.657589 1539150 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:27:23.657618 1539150 node_conditions.go:123] node cpu capacity is 2
	I0916 11:27:23.657630 1539150 node_conditions.go:105] duration metric: took 10.011501ms to run NodePressure ...
	I0916 11:27:23.657644 1539150 start.go:241] waiting for startup goroutines ...
	I0916 11:27:23.658534 1539150 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:27:23.661133 1539150 addons.go:510] duration metric: took 1.149806498s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:27:23.661208 1539150 start.go:246] waiting for cluster config update ...
	I0916 11:27:23.661251 1539150 start.go:255] writing updated cluster config ...
	I0916 11:27:23.661577 1539150 ssh_runner.go:195] Run: rm -f paused
	I0916 11:27:23.668465 1539150 out.go:177] * Done! kubectl is now configured to use "kubernetes-upgrade-485103" cluster and "default" namespace by default
	E0916 11:27:23.670959 1539150 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
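
This "exec format error" is the same failure mode behind the kubectl-driven test failures in this report's summary: /usr/local/bin/kubectl on the host is a binary built for a different architecture than this arm64 worker, so the kernel refuses to exec it. The in-node steps above succeed because they invoke the pinned /var/lib/minikube/binaries/v1.31.1/kubectl instead. A triage sketch (paths as in the log; the download URL follows the standard dl.k8s.io layout and is shown only as an example):

  file /usr/local/bin/kubectl   # expect: ELF 64-bit LSB ..., ARM aarch64
  uname -m                      # aarch64 on this host
  # if file reports x86-64, fetch the arm64 build instead, e.g.:
  # curl -LO https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl
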
	
	
	==> CRI-O <==
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.747543103Z" level=info msg="Checking image status: registry.k8s.io/kube-scheduler:v1.31.1" id=c5356d37-6e26-4c4c-9496-f5274a34bc1f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.747717261Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d,RepoTags:[registry.k8s.io/kube-scheduler:v1.31.1],RepoDigests:[registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690 registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0],Size_:67007814,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=c5356d37-6e26-4c4c-9496-f5274a34bc1f name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.748439647Z" level=info msg="Checking image status: registry.k8s.io/kube-scheduler:v1.31.1" id=efa3cf33-e84f-4fa1-8174-8603240bb1bc name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.748604270Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d,RepoTags:[registry.k8s.io/kube-scheduler:v1.31.1],RepoDigests:[registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690 registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0],Size_:67007814,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=efa3cf33-e84f-4fa1-8174-8603240bb1bc name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.749277731Z" level=info msg="Creating container: kube-system/kube-scheduler-kubernetes-upgrade-485103/kube-scheduler" id=b6e402b2-ffe1-4a05-a260-396f76b25052 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.749359484Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.751955457Z" level=info msg="Ran pod sandbox 0fdf286ba5d8f9dc27dda5a00b775e2fe580eafa4ee9410473c774e50c2b7f41 with infra container: kube-system/etcd-kubernetes-upgrade-485103/POD" id=932bada9-c6f1-4bba-9688-2770cb05b370 name=/runtime.v1.RuntimeService/RunPodSandbox
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.752958156Z" level=info msg="Checking image status: registry.k8s.io/etcd:3.5.15-0" id=a4d5b57a-4523-4498-91dc-ab7b79a32cb7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.759319852Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da,RepoTags:[registry.k8s.io/etcd:3.5.15-0],RepoDigests:[registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da],Size_:139912446,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=a4d5b57a-4523-4498-91dc-ab7b79a32cb7 name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.760125346Z" level=info msg="Checking image status: registry.k8s.io/etcd:3.5.15-0" id=40061c03-7b19-4996-97fd-922a955bf6df name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.760320311Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da,RepoTags:[registry.k8s.io/etcd:3.5.15-0],RepoDigests:[registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da],Size_:139912446,Uid:&Int64Value{Value:0,},Username:,Spec:nil,},Info:map[string]string{},}" id=40061c03-7b19-4996-97fd-922a955bf6df name=/runtime.v1.ImageService/ImageStatus
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.761115483Z" level=info msg="Creating container: kube-system/etcd-kubernetes-upgrade-485103/etcd" id=07d81202-e94f-4b4a-a562-879614cc9c9c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.761221712Z" level=warning msg="Allowed annotations are specified for workload []"
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.890502221Z" level=info msg="Created container 102fde2a52ced53c89f547c85268fc269efc2c6148600b16fd6b82c98dfe29f7: kube-system/kube-apiserver-kubernetes-upgrade-485103/kube-apiserver" id=8ff98557-17ee-4a91-a643-d5acdcfab8c5 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.891220447Z" level=info msg="Starting container: 102fde2a52ced53c89f547c85268fc269efc2c6148600b16fd6b82c98dfe29f7" id=7b2b2c03-1698-48a8-9f65-8aa96e604e4a name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.893049242Z" level=info msg="Created container ecb455b3657ed551396b13f5a39ab66c9888f5bf88701e0400eae57c743befab: kube-system/kube-controller-manager-kubernetes-upgrade-485103/kube-controller-manager" id=b1ef7266-64e9-4e9b-9a09-d6b38bc7e37d name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.893536385Z" level=info msg="Starting container: ecb455b3657ed551396b13f5a39ab66c9888f5bf88701e0400eae57c743befab" id=2f9c05b5-4c30-4dec-aca3-d1aa9118eda7 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.899387251Z" level=info msg="Created container d6b7bf683488da923c9479b44e276409c83176013d1b7ef090d07b5dc31a8abd: kube-system/etcd-kubernetes-upgrade-485103/etcd" id=07d81202-e94f-4b4a-a562-879614cc9c9c name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.899912071Z" level=info msg="Starting container: d6b7bf683488da923c9479b44e276409c83176013d1b7ef090d07b5dc31a8abd" id=7978faa0-84d8-415e-a742-ac16a8490cd4 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.903751901Z" level=info msg="Created container e6986a983c0fc1d9ed8467bcbb9d40568c3d3cbabf38709279155d0cf4a54313: kube-system/kube-scheduler-kubernetes-upgrade-485103/kube-scheduler" id=b6e402b2-ffe1-4a05-a260-396f76b25052 name=/runtime.v1.RuntimeService/CreateContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.904293656Z" level=info msg="Starting container: e6986a983c0fc1d9ed8467bcbb9d40568c3d3cbabf38709279155d0cf4a54313" id=7b61ee05-42f7-4f3b-ae2f-2b8d2e70aff3 name=/runtime.v1.RuntimeService/StartContainer
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.908733841Z" level=info msg="Started container" PID=5500 containerID=102fde2a52ced53c89f547c85268fc269efc2c6148600b16fd6b82c98dfe29f7 description=kube-system/kube-apiserver-kubernetes-upgrade-485103/kube-apiserver id=7b2b2c03-1698-48a8-9f65-8aa96e604e4a name=/runtime.v1.RuntimeService/StartContainer sandboxID=95603812e7816eb71b1221d6a805b5aeabee766ca54e40d588555c9e4c4e7887
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.917385031Z" level=info msg="Started container" PID=5454 containerID=ecb455b3657ed551396b13f5a39ab66c9888f5bf88701e0400eae57c743befab description=kube-system/kube-controller-manager-kubernetes-upgrade-485103/kube-controller-manager id=2f9c05b5-4c30-4dec-aca3-d1aa9118eda7 name=/runtime.v1.RuntimeService/StartContainer sandboxID=91b3629155878611dc0edf42121a32aa08d327c7f420850c757e9d2f5fdce2d1
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.924294881Z" level=info msg="Started container" PID=5485 containerID=d6b7bf683488da923c9479b44e276409c83176013d1b7ef090d07b5dc31a8abd description=kube-system/etcd-kubernetes-upgrade-485103/etcd id=7978faa0-84d8-415e-a742-ac16a8490cd4 name=/runtime.v1.RuntimeService/StartContainer sandboxID=0fdf286ba5d8f9dc27dda5a00b775e2fe580eafa4ee9410473c774e50c2b7f41
	Sep 16 11:27:12 kubernetes-upgrade-485103 crio[537]: time="2024-09-16 11:27:12.935980498Z" level=info msg="Started container" PID=5510 containerID=e6986a983c0fc1d9ed8467bcbb9d40568c3d3cbabf38709279155d0cf4a54313 description=kube-system/kube-scheduler-kubernetes-upgrade-485103/kube-scheduler id=7b61ee05-42f7-4f3b-ae2f-2b8d2e70aff3 name=/runtime.v1.RuntimeService/StartContainer sandboxID=10d11a184d6b1582bdf732936916da6dd992e858ecf307e902cb9062e4da21cb
	
	
	==> container status <==
	CONTAINER           IMAGE                                                              CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	e6986a983c0fc       7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d   12 seconds ago      Running             kube-scheduler            1                   10d11a184d6b1       kube-scheduler-kubernetes-upgrade-485103
	102fde2a52ced       d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853   12 seconds ago      Running             kube-apiserver            5                   95603812e7816       kube-apiserver-kubernetes-upgrade-485103
	d6b7bf683488d       27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da   12 seconds ago      Running             etcd                      0                   0fdf286ba5d8f       etcd-kubernetes-upgrade-485103
	ecb455b3657ed       279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e   12 seconds ago      Running             kube-controller-manager   5                   91b3629155878       kube-controller-manager-kubernetes-upgrade-485103
	
	
	==> describe nodes <==
	Name:               kubernetes-upgrade-485103
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=kubernetes-upgrade-485103
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=kubernetes-upgrade-485103
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_27_22_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:27:18 +0000
	Taints:             node.kubernetes.io/not-ready:NoSchedule
	Unschedulable:      false
	Lease:
	  HolderIdentity:  kubernetes-upgrade-485103
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:27:21 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:27:21 +0000   Mon, 16 Sep 2024 11:27:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:27:21 +0000   Mon, 16 Sep 2024 11:27:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:27:21 +0000   Mon, 16 Sep 2024 11:27:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            False   Mon, 16 Sep 2024 11:27:21 +0000   Mon, 16 Sep 2024 11:27:13 +0000   KubeletNotReady              container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: No CNI configuration file in /etc/cni/net.d/. Has your network provider started?
	Addresses:
	  InternalIP:  192.168.76.2
	  Hostname:    kubernetes-upgrade-485103
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022304Ki
	  pods:               110
	System Info:
	  Machine ID:                 3ee89ab2c0a54974b9028e9e70952bb7
	  System UUID:                1882f879-a4a7-4829-a8b8-33db85fb75fc
	  Boot ID:                    34b2555f-ef29-4c31-9b47-b3b930bd3b4b
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  cri-o://1.24.6
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (4 in total)
	  Namespace                   Name                                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                                 ------------  ----------  ---------------  -------------  ---
	  kube-system                 etcd-kubernetes-upgrade-485103                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4s
	  kube-system                 kube-apiserver-kubernetes-upgrade-485103             250m (12%)    0 (0%)      0 (0%)           0 (0%)         7s
	  kube-system                 kube-controller-manager-kubernetes-upgrade-485103    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-scheduler-kubernetes-upgrade-485103             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                650m (32%)  0 (0%)
	  memory             100Mi (1%)  0 (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 4s    kubelet          Starting kubelet.
	  Warning  CgroupV1                 4s    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4s    kubelet          Node kubernetes-upgrade-485103 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4s    kubelet          Node kubernetes-upgrade-485103 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4s    kubelet          Node kubernetes-upgrade-485103 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           1s    node-controller  Node kubernetes-upgrade-485103 event: Registered Node kubernetes-upgrade-485103 in Controller
	
	
	==> dmesg <==
	[Sep16 10:07] systemd-journald[226]: Failed to send stream file descriptor to service manager: Connection refused
	
	
	==> etcd [d6b7bf683488da923c9479b44e276409c83176013d1b7ef090d07b5dc31a8abd] <==
	{"level":"info","ts":"2024-09-16T11:27:13.070061Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:27:13.070278Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"ea7e25599daad906","initial-advertise-peer-urls":["https://192.168.76.2:2380"],"listen-peer-urls":["https://192.168.76.2:2380"],"advertise-client-urls":["https://192.168.76.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.76.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:27:13.070307Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:27:13.070375Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.76.2:2380"}
	{"level":"info","ts":"2024-09-16T11:27:13.070390Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.76.2:2380"}
	{"level":"info","ts":"2024-09-16T11:27:13.596211Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:27:13.596350Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:27:13.596403Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 received MsgPreVoteResp from ea7e25599daad906 at term 1"}
	{"level":"info","ts":"2024-09-16T11:27:13.596440Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:27:13.596475Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 received MsgVoteResp from ea7e25599daad906 at term 2"}
	{"level":"info","ts":"2024-09-16T11:27:13.596517Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:27:13.596552Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: ea7e25599daad906 elected leader ea7e25599daad906 at term 2"}
	{"level":"info","ts":"2024-09-16T11:27:13.599648Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:27:13.606887Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"ea7e25599daad906","local-member-attributes":"{Name:kubernetes-upgrade-485103 ClientURLs:[https://192.168.76.2:2379]}","request-path":"/0/members/ea7e25599daad906/attributes","cluster-id":"6f20f2c4b2fb5f8a","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:27:13.607112Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"6f20f2c4b2fb5f8a","local-member-id":"ea7e25599daad906","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:27:13.607202Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:27:13.607249Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:27:13.607301Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:27:13.607590Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:27:13.608311Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:27:13.613491Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:27:13.619445Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:27:13.620507Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.76.2:2379"}
	{"level":"info","ts":"2024-09-16T11:27:13.619564Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:27:13.624313Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> kernel <==
	 11:27:25 up 11:09,  0 users,  load average: 5.39, 3.73, 3.03
	Linux kubernetes-upgrade-485103 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kube-apiserver [102fde2a52ced53c89f547c85268fc269efc2c6148600b16fd6b82c98dfe29f7] <==
	I0916 11:27:18.154821       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 11:27:18.154931       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:27:18.154974       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:27:18.155006       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:27:18.155036       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:27:18.168553       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:27:18.172124       1 controller.go:615] quota admission added evaluator for: namespaces
	E0916 11:27:18.308049       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	E0916 11:27:18.309609       1 controller.go:145] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
	I0916 11:27:18.518812       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:27:18.820994       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:27:18.830955       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:27:18.830978       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:27:19.678368       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:27:19.745455       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:27:19.910309       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:27:19.926772       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.76.2]
	I0916 11:27:19.928025       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:27:19.933633       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:27:19.983424       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:27:21.121480       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:27:21.295644       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:27:21.350736       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:27:25.598216       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 11:27:25.667583       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	
	
	==> kube-controller-manager [ecb455b3657ed551396b13f5a39ab66c9888f5bf88701e0400eae57c743befab] <==
	I0916 11:27:24.930146       1 shared_informer.go:320] Caches are synced for job
	I0916 11:27:24.930368       1 shared_informer.go:320] Caches are synced for HPA
	I0916 11:27:24.930387       1 shared_informer.go:320] Caches are synced for deployment
	I0916 11:27:24.940857       1 shared_informer.go:320] Caches are synced for disruption
	I0916 11:27:24.941121       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 11:27:24.941280       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 11:27:24.941413       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 11:27:24.942278       1 shared_informer.go:320] Caches are synced for GC
	I0916 11:27:24.942319       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 11:27:24.973794       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 11:27:24.973930       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 11:27:24.974986       1 shared_informer.go:320] Caches are synced for taint
	I0916 11:27:24.984093       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 11:27:24.985534       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 11:27:24.985600       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 11:27:24.985714       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 11:27:24.985812       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="kubernetes-upgrade-485103"
	I0916 11:27:24.985890       1 node_lifecycle_controller.go:1036] "Controller detected that all Nodes are not-Ready. Entering master disruption mode" logger="node-lifecycle-controller"
	I0916 11:27:24.985964       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:27:24.986012       1 shared_informer.go:320] Caches are synced for endpoint
	I0916 11:27:25.000887       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 11:27:25.143436       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="kubernetes-upgrade-485103"
	I0916 11:27:25.376136       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:27:25.376178       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 11:27:25.382823       1 shared_informer.go:320] Caches are synced for garbage collector
	
	
	==> kube-scheduler [e6986a983c0fc1d9ed8467bcbb9d40568c3d3cbabf38709279155d0cf4a54313] <==
	W0916 11:27:18.225218       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:18.225295       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:18.225333       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:18.225354       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:18.225267       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:27:18.225383       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:18.225442       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:18.225458       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:18.225103       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 11:27:18.225478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:18.225540       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:18.225606       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.049065       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 11:27:19.049122       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.144662       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:19.145272       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.210731       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 11:27:19.210842       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.233539       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:27:19.233667       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.245674       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:27:19.245907       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:27:19.392064       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:27:19.392222       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:27:19.796513       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492327    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"k8s-certs\" (UniqueName: \"kubernetes.io/host-path/2d905d2dd0f084f080a7c7440e3fe25a-k8s-certs\") pod \"kube-apiserver-kubernetes-upgrade-485103\" (UID: \"2d905d2dd0f084f080a7c7440e3fe25a\") " pod="kube-system/kube-apiserver-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492344    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-share-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/2d905d2dd0f084f080a7c7440e3fe25a-usr-local-share-ca-certificates\") pod \"kube-apiserver-kubernetes-upgrade-485103\" (UID: \"2d905d2dd0f084f080a7c7440e3fe25a\") " pod="kube-system/kube-apiserver-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492361    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/host-path/0d1eb005b39a6bcbfc46addbdab55bea-ca-certs\") pod \"kube-controller-manager-kubernetes-upgrade-485103\" (UID: \"0d1eb005b39a6bcbfc46addbdab55bea\") " pod="kube-system/kube-controller-manager-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492383    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/host-path/5a4c53d6d4f6244cae08fd28d1b994b6-kubeconfig\") pod \"kube-scheduler-kubernetes-upgrade-485103\" (UID: \"5a4c53d6d4f6244cae08fd28d1b994b6\") " pod="kube-system/kube-scheduler-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492402    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-certs\" (UniqueName: \"kubernetes.io/host-path/5c5c09acda492bf795789b67cd89db43-etcd-certs\") pod \"etcd-kubernetes-upgrade-485103\" (UID: \"5c5c09acda492bf795789b67cd89db43\") " pod="kube-system/etcd-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492429    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/2d905d2dd0f084f080a7c7440e3fe25a-etc-ca-certificates\") pod \"kube-apiserver-kubernetes-upgrade-485103\" (UID: \"2d905d2dd0f084f080a7c7440e3fe25a\") " pod="kube-system/kube-apiserver-kubernetes-upgrade-485103"
	Sep 16 11:27:21 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:21.492448    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-share-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/0d1eb005b39a6bcbfc46addbdab55bea-usr-local-share-ca-certificates\") pod \"kube-controller-manager-kubernetes-upgrade-485103\" (UID: \"0d1eb005b39a6bcbfc46addbdab55bea\") " pod="kube-system/kube-controller-manager-kubernetes-upgrade-485103"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.149291    5628 apiserver.go:52] "Watching apiserver"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.175040    5628 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: E0916 11:27:22.429952    5628 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"etcd-kubernetes-upgrade-485103\" already exists" pod="kube-system/etcd-kubernetes-upgrade-485103"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.527222    5628 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/etcd-kubernetes-upgrade-485103" podStartSLOduration=1.517293232 podStartE2EDuration="1.517293232s" podCreationTimestamp="2024-09-16 11:27:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:27:22.517027811 +0000 UTC m=+1.535958113" watchObservedRunningTime="2024-09-16 11:27:22.517293232 +0000 UTC m=+1.536223525"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.844332    5628 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-kubernetes-upgrade-485103" podStartSLOduration=4.844309841 podStartE2EDuration="4.844309841s" podCreationTimestamp="2024-09-16 11:27:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:27:22.745351589 +0000 UTC m=+1.764281891" watchObservedRunningTime="2024-09-16 11:27:22.844309841 +0000 UTC m=+1.863240143"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.918007    5628 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-controller-manager-kubernetes-upgrade-485103" podStartSLOduration=1.917986071 podStartE2EDuration="1.917986071s" podCreationTimestamp="2024-09-16 11:27:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:27:22.863728672 +0000 UTC m=+1.882658974" watchObservedRunningTime="2024-09-16 11:27:22.917986071 +0000 UTC m=+1.936916365"
	Sep 16 11:27:22 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:22.918108    5628 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-scheduler-kubernetes-upgrade-485103" podStartSLOduration=1.9181028759999998 podStartE2EDuration="1.918102876s" podCreationTimestamp="2024-09-16 11:27:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:27:22.913752634 +0000 UTC m=+1.932682936" watchObservedRunningTime="2024-09-16 11:27:22.918102876 +0000 UTC m=+1.937033178"
	Sep 16 11:27:24 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:24.976136    5628 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:27:24 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:24.980258    5628 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927084    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sxq7\" (UniqueName: \"kubernetes.io/projected/83c810d1-bc6c-491e-b546-8b042b267eee-kube-api-access-5sxq7\") pod \"kindnet-5w7pl\" (UID: \"83c810d1-bc6c-491e-b546-8b042b267eee\") " pod="kube-system/kindnet-5w7pl"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927135    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-proxy\" (UniqueName: \"kubernetes.io/configmap/b1d1c97f-b8db-4f08-b80f-f846ab8a3f89-kube-proxy\") pod \"kube-proxy-qqgjq\" (UID: \"b1d1c97f-b8db-4f08-b80f-f846ab8a3f89\") " pod="kube-system/kube-proxy-qqgjq"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927158    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/b1d1c97f-b8db-4f08-b80f-f846ab8a3f89-xtables-lock\") pod \"kube-proxy-qqgjq\" (UID: \"b1d1c97f-b8db-4f08-b80f-f846ab8a3f89\") " pod="kube-system/kube-proxy-qqgjq"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927175    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx4sr\" (UniqueName: \"kubernetes.io/projected/b1d1c97f-b8db-4f08-b80f-f846ab8a3f89-kube-api-access-rx4sr\") pod \"kube-proxy-qqgjq\" (UID: \"b1d1c97f-b8db-4f08-b80f-f846ab8a3f89\") " pod="kube-system/kube-proxy-qqgjq"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927196    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/83c810d1-bc6c-491e-b546-8b042b267eee-cni-cfg\") pod \"kindnet-5w7pl\" (UID: \"83c810d1-bc6c-491e-b546-8b042b267eee\") " pod="kube-system/kindnet-5w7pl"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927213    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/83c810d1-bc6c-491e-b546-8b042b267eee-xtables-lock\") pod \"kindnet-5w7pl\" (UID: \"83c810d1-bc6c-491e-b546-8b042b267eee\") " pod="kube-system/kindnet-5w7pl"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927230    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/83c810d1-bc6c-491e-b546-8b042b267eee-lib-modules\") pod \"kindnet-5w7pl\" (UID: \"83c810d1-bc6c-491e-b546-8b042b267eee\") " pod="kube-system/kindnet-5w7pl"
	Sep 16 11:27:25 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:25.927249    5628 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b1d1c97f-b8db-4f08-b80f-f846ab8a3f89-lib-modules\") pod \"kube-proxy-qqgjq\" (UID: \"b1d1c97f-b8db-4f08-b80f-f846ab8a3f89\") " pod="kube-system/kube-proxy-qqgjq"
	Sep 16 11:27:26 kubernetes-upgrade-485103 kubelet[5628]: I0916 11:27:26.129574    5628 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p kubernetes-upgrade-485103 -n kubernetes-upgrade-485103
helpers_test.go:261: (dbg) Run:  kubectl --context kubernetes-upgrade-485103 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context kubernetes-upgrade-485103 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (848.405µs)
helpers_test.go:263: kubectl --context kubernetes-upgrade-485103 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:175: Cleaning up "kubernetes-upgrade-485103" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p kubernetes-upgrade-485103
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p kubernetes-upgrade-485103: (2.346609362s)
--- FAIL: TestKubernetesUpgrade (358.72s)
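Note on the failure mode: every kubectl invocation in this run dies with "fork/exec /usr/local/bin/kubectl: exec format error". That error is the kernel refusing to execute the binary at all, which on Linux almost always means the file was built for a different CPU architecture than this arm64 worker (for example an amd64 kubectl copied into /usr/local/bin). A minimal sketch of how to surface the mismatch, using only the Go standard library; the kubectl path is the one from the log above:

	package main

	import (
		"debug/elf"
		"fmt"
		"runtime"
	)

	func main() {
		// Open the same binary the tests fork/exec.
		f, err := elf.Open("/usr/local/bin/kubectl")
		if err != nil {
			fmt.Println("cannot read ELF header:", err)
			return
		}
		defer f.Close()
		// On this arm64 host, anything other than EM_AARCH64 here would
		// reproduce the "exec format error" seen throughout this report.
		fmt.Printf("binary machine=%v, host GOARCH=%s\n", f.Machine, runtime.GOARCH)
	}

(A shell "file /usr/local/bin/kubectl" on the worker would show the same information.)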

                                                
                                    
x
+
TestNetworkPlugins/group/custom-flannel/NetCatPod (7200.083s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context custom-flannel-141252 replace --force -f testdata/netcat-deployment.yaml
net_test.go:149: (dbg) Non-zero exit: kubectl --context custom-flannel-141252 replace --force -f testdata/netcat-deployment.yaml: fork/exec /usr/local/bin/kubectl: exec format error (574.688µs)
net_test.go:151: failed to apply netcat manifest: fork/exec /usr/local/bin/kubectl: exec format error
E0916 12:18:53.341078 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:19:17.496064 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:20:01.904734 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/default-k8s-diff-port-992501/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:53.840080 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:53.846570 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:53.857987 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:53.879334 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:53.920968 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:54.006590 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:54.183989 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:54.505706 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:55.147762 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:56.430086 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:21:58.991742 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:04.113719 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:14.355066 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.797416 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.803800 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.815271 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.836722 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.878257 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:23.959774 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:24.121471 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:24.442998 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:25.085128 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:26.366561 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:28.929030 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:32.158909 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/no-preload-137798/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:34.050667 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:34.837634 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:44.292929 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:02.020782 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/old-k8s-version-114963/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:04.774386 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:04.974055 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/default-k8s-diff-port-992501/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:15.800548 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:45.736480 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:53.342081 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:24:17.495168 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:24:37.722588 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:25:01.904723 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/default-k8s-diff-port-992501/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:25:07.657862 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:26:53.840718 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:21.564792 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:23.797292 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:32.159050 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/no-preload-137798/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:51.499483 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:02.020802 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/old-k8s-version-114963/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:36.416824 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:53.341374 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:29:17.495187 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:30:01.905069 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/default-k8s-diff-port-992501/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:31:53.840276 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/auto-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:32:15.227615 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/no-preload-137798/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:32:23.797379 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/kindnet-141252/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:32:32.159148 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/no-preload-137798/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:32:45.088876 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/old-k8s-version-114963/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:33:02.020732 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/old-k8s-version-114963/client.crt: no such file or directory" logger="UnhandledError"
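The cert_rotation.go:171 errors above all follow one pattern: client-go's certificate-rotation worker keeps re-reading the client.crt files referenced by kubeconfigs of minikube profiles (addons-936355, functional-919910, auto-141252, ...) that have since been deleted, so every poll logs "no such file or directory". A minimal sketch of where that path comes from, assuming a kubeconfig that references the client certificate by file path; the helper below is illustrative, not part of the suite:

	package main

	import (
		"fmt"
		"os"

		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		// Load a kubeconfig the way the test clients do (path is illustrative).
		cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
		if err != nil {
			fmt.Println("load kubeconfig:", err)
			return
		}
		// client-go's rotation worker re-reads this file periodically; once
		// the profile directory is deleted, every re-read produces the
		// "client.crt: no such file or directory" errors seen above.
		if p := cfg.TLSClientConfig.CertFile; p != "" {
			if _, err := os.Stat(p); err != nil {
				fmt.Println("stale client certificate:", err)
			}
		}
	}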
net_test.go:160: failed waiting for netcat deployment to stabilize: timed out waiting for the condition
net_test.go:163: (dbg) TestNetworkPlugins/group/custom-flannel/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
E0916 12:33:53.341686 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:34:00.570613 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:34:17.495512 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
panic: test timed out after 2h0m0s
	running tests:
		TestNetworkPlugins (1h7m50s)
		TestNetworkPlugins/group/calico (17m37s)
		TestNetworkPlugins/group/calico/NetCatPod (16m20s)
		TestNetworkPlugins/group/custom-flannel (16m56s)
		TestNetworkPlugins/group/custom-flannel/NetCatPod (15m57s)
		TestStartStop (1h8m27s)
		TestStartStop/group (48m46s)

                                                
                                                
goroutine 5133 [running]:
testing.(*M).startAlarm.func1()
	/usr/local/go/src/testing/testing.go:2373 +0x30c
created by time.goFunc
	/usr/local/go/src/time/sleep.go:215 +0x38
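Goroutine 5133 above is the testing package's timeout alarm: testing.(*M).startAlarm arms a timer for the binary's -timeout (2h0m0s here), and when it fires the process panics and dumps every goroutine, which is what the remainder of this section is. A hedged sketch of one way to keep a single stuck wait from riding out the global timeout; TestWaitWithDeadline is hypothetical and not part of the minikube suite:

	package integration

	import (
		"context"
		"testing"
		"time"
	)

	// Giving each long wait its own deadline lets a stuck poll fail one
	// test cleanly instead of holding the whole binary until the global
	// -timeout panic above.
	func TestWaitWithDeadline(t *testing.T) {
		ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute)
		defer cancel()
		select {
		case <-time.After(time.Second): // stand-in for the condition being polled
		case <-ctx.Done():
			t.Fatalf("gave up waiting: %v", ctx.Err())
		}
	}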

                                                
                                                
goroutine 1 [chan receive, 63 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1651 +0x434
testing.tRunner(0x40004f3380, 0x40004bbbb8)
	/usr/local/go/src/testing/testing.go:1696 +0x120
testing.runTests(0x40006c2390, {0x47d1fe0, 0x2b, 0x2b}, {0x40004bbd08?, 0x11fc54?, 0x47f5c60?})
	/usr/local/go/src/testing/testing.go:2166 +0x3ac
testing.(*M).Run(0x400064f720)
	/usr/local/go/src/testing/testing.go:2034 +0x588
k8s.io/minikube/test/integration.TestMain(0x400064f720)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/main_test.go:62 +0x84
main.main()
	_testmain.go:131 +0x98

                                                
                                                
goroutine 5 [select]:
go.opencensus.io/stats/view.(*worker).start(0x400063bc80)
	/var/lib/jenkins/go/pkg/mod/go.opencensus.io@v0.24.0/stats/view/worker.go:292 +0x88
created by go.opencensus.io/stats/view.init.0 in goroutine 1
	/var/lib/jenkins/go/pkg/mod/go.opencensus.io@v0.24.0/stats/view/worker.go:34 +0x98

                                                
                                                
goroutine 4869 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 4868
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2398 [chan receive, 57 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x400149a940, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2396
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 130 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 127
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2629 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2628
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 596 [chan send, 105 minutes]:
os/exec.(*Cmd).watchCtx(0x4002379500, 0x400233b6c0)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 595
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

                                                
                                                
goroutine 131 [chan receive, 115 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40006b5c40, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 127
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 35 [select]:
k8s.io/klog/v2.(*flushDaemon).run.func1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/klog/v2@v2.130.1/klog.go:1141 +0xe0
created by k8s.io/klog/v2.(*flushDaemon).run in goroutine 34
	/var/lib/jenkins/go/pkg/mod/k8s.io/klog/v2@v2.130.1/klog.go:1137 +0x198

                                                
                                                
goroutine 4885 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 4884
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2418 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2417
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2400 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x400149a910, 0x1e)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x400149a900)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x400149a940)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400085ac60, {0x31d56c0, 0x4001f75f20}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400085ac60, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2398
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198
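Goroutine 2400 above, like the other sync.Cond.Wait stacks in this dump, is an idle cert-rotation worker parked in workqueue.(*Typed).Get, which blocks until an item is added or the queue is shut down; each leaked client therefore keeps such goroutines alive for the life of the test binary. A minimal sketch of that Get/Done contract, assuming the generic workqueue API of k8s.io/client-go v0.31 (the version in these stack traces):

	package main

	import (
		"fmt"

		"k8s.io/client-go/util/workqueue"
	)

	func main() {
		q := workqueue.NewTyped[string]()
		go func() {
			q.Add("rotate-client-cert") // hypothetical work item
			q.ShutDown()                // without this, Get below blocks forever
		}()
		for {
			item, shutdown := q.Get() // parks in sync.Cond.Wait while empty
			if shutdown {
				return
			}
			fmt.Println("processing", item)
			q.Done(item) // mark the item finished so it can be re-queued
		}
	}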

                                                
                                                
goroutine 2052 [chan receive, 49 minutes]:
testing.(*testContext).waitParallel(0x40006e0690)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1666 +0x530
testing.tRunner(0x400149c820, 0x2e80e50)
	/usr/local/go/src/testing/testing.go:1696 +0x120
created by testing.(*T).Run in goroutine 1822
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 4852 [chan receive, 16 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40004e9080, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 4850
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2809 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2808
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2059 [chan receive, 17 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1651 +0x434
testing.tRunner(0x400149dd40, 0x400186e3c0)
	/usr/local/go/src/testing/testing.go:1696 +0x120
created by testing.(*T).Run in goroutine 1743
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 2807 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x40013da3d0, 0x19)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40013da3c0)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40013da400)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4000224020, {0x31d56c0, 0x40019a4ea0}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4000224020, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2804
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 4855 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x40004e9050, 0x3)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40004e9040)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40004e9080)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400085b4d0, {0x31d56c0, 0x4001671ef0}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400085b4d0, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 4852
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 2632 [sync.Cond.Wait, 4 minutes]:
sync.runtime_notifyListWait(0x40004e8f50, 0x1b)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40004e8f40)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40004e8f80)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4000495e30, {0x31d56c0, 0x4001b00a20}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4000495e30, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2630
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 2808 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x4000081740, 0x4001aa3f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x0?, 0x4000081740, 0x4000081788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x4000b60600?, 0x400021fb80?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4002379200?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2804
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 149 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x40006b5c10, 0x2d)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40006b5c00)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40006b5c40)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40013be3d0, {0x31d56c0, 0x40008cdb60}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40013be3d0, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 131
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 150 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x4000087740, 0x400131df88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x14?, 0x4000087740, 0x4000087788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x0?, 0x0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4000697d40?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 131
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 151 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 150
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2777 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2776
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 366 [IO wait, 109 minutes]:
internal/poll.runtime_pollWait(0xffff595dac78, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x40004e1100?, 0x10?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Accept(0x40004e1100)
	/usr/local/go/src/internal/poll/fd_unix.go:620 +0x24c
net.(*netFD).accept(0x40004e1100)
	/usr/local/go/src/net/fd_unix.go:172 +0x28
net.(*TCPListener).accept(0x40013da5c0)
	/usr/local/go/src/net/tcpsock_posix.go:159 +0x28
net.(*TCPListener).Accept(0x40013da5c0)
	/usr/local/go/src/net/tcpsock.go:372 +0x2c
net/http.(*Server).Serve(0x40008ecd20, {0x31ef6e0, 0x40013da5c0})
	/usr/local/go/src/net/http/server.go:3330 +0x294
net/http.(*Server).ListenAndServe(0x40008ecd20)
	/usr/local/go/src/net/http/server.go:3259 +0x84
k8s.io/minikube/test/integration.startHTTPProxy.func1(0x400149d860?, 0x400149d860)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/functional_test.go:2213 +0x20
created by k8s.io/minikube/test/integration.startHTTPProxy in goroutine 364
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/functional_test.go:2212 +0x11c

                                                
                                                
goroutine 1743 [chan receive, 69 minutes]:
testing.(*T).Run(0x40006961a0, {0x2395ecb?, 0x23ff36874871?}, 0x400186e3c0)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins(0x40006961a0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:52 +0xcc
testing.tRunner(0x40006961a0, 0x2e80c10)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 4867 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x400160d150, 0x3)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x400160d140)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x400160d180)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40015c0800, {0x31d56c0, 0x40015abc80}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40015c0800, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 4886
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

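The sync.Cond.Wait goroutines in this dump (4867, 2337, 579, 2767) are client-go certificate-rotation workers blocked on an empty work queue; that is idle waiting, not a deadlock. A hedged sketch of that worker loop, using the typed workqueue API the traces show (function name hypothetical; the real code is client-go's transport/cert_rotation.go):

	package sketch

	import (
		"time"

		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/util/workqueue"
	)

	// runWorker drains the queue until shutdown. queue.Get blocks in
	// sync.Cond.Wait while the queue is empty, which is exactly where
	// the goroutines above are parked; wait.Until re-enters the loop
	// every second until stopCh closes.
	func runWorker(queue workqueue.TypedInterface[string], stopCh <-chan struct{}) {
		wait.Until(func() {
			for {
				key, shutdown := queue.Get()
				if shutdown {
					return
				}
				// ... rotate the client certificate for key ...
				queue.Done(key)
			}
		}, time.Second, stopCh)
	}
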
goroutine 2634 [select, 4 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2633
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

goroutine 4857 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 4856
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

goroutine 2768 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x40017b8740, 0x40013a7f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x18?, 0x40017b8740, 0x40017b8788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x74736566696e616d?, 0x72746e6f432073?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4000b60900?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2778
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 2319 [chan receive, 57 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40013da6c0, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2317
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

goroutine 2633 [select, 4 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x400130bf40, 0x400130bf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x40?, 0x400130bf40, 0x400130bf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x90a737473656669?, 0x697061090a2d2d2d?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4002378c00?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2630
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 2337 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x40013da690, 0x1e)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40013da680)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40013da6c0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4001aca020, {0x31d56c0, 0x4001ac8060}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4001aca020, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2319
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

goroutine 2338 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x40004c7f40, 0x40004c7f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0xc8?, 0x40004c7f40, 0x40004c7f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x40001ffb00?, 0x40000c7b80?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4002378780?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2319
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 600 [chan receive, 105 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x400149b4c0, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 598
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

goroutine 2318 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2317
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

goroutine 599 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 598
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

goroutine 2803 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2802
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

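The [select] goroutines 2318, 599 and 2803 (and 4851 below) are the matching delaying-queue timer loops; client-go starts one waitingLoop per queue at construction. A short hedged sketch of the constructor that spawns it (the queued item name is hypothetical):

	package sketch

	import (
		"time"

		"k8s.io/client-go/util/workqueue"
	)

	// newDelayingQueue builds a typed delaying queue. The constructor
	// starts the waitingLoop goroutine seen above, which selects on
	// timers and on items handed in via AddAfter.
	func newDelayingQueue() workqueue.TypedDelayingInterface[string] {
		q := workqueue.NewTypedDelayingQueue[string]()
		q.AddAfter("rotate-cert", 30*time.Second) // delivered to Get after 30s
		return q
	}
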
goroutine 730 [chan send, 103 minutes]:
os/exec.(*Cmd).watchCtx(0x40001ffc80, 0x4001948770)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 514
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

goroutine 2417 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x4001315f40, 0x40013adf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x0?, 0x4001315f40, 0x4001315f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x4000b60480?, 0x400021f3f0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x40001ff200?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2398
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 695 [chan send, 103 minutes]:
os/exec.(*Cmd).watchCtx(0x4002378c00, 0x4001949260)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 694
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

goroutine 2785 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2768
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

goroutine 2339 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2338
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

goroutine 2113 [chan receive, 69 minutes]:
testing.(*testContext).waitParallel(0x40006e0690)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400014ed00)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400014ed00)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400014ed00)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400014ed00, 0x40004e1300)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2059
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 4851 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 4850
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

goroutine 2778 [chan receive, 49 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x400149b500, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2776
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

goroutine 1822 [chan receive, 69 minutes]:
testing.(*T).Run(0x400149c4e0, {0x2395ecb?, 0x40013a9f58?}, 0x2e80e50)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestStartStop(0x400149c4e0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/start_stop_delete_test.go:46 +0x3c
testing.tRunner(0x400149c4e0, 0x2e80c58)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 4871 [IO wait]:
internal/poll.runtime_pollWait(0xffff595da018, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001514b80?, 0x400128b800?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001514b80, {0x400128b800, 0x800, 0x800})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
net.(*netFD).Read(0x4001514b80, {0x400128b800?, 0x18?, 0x10?})
	/usr/local/go/src/net/fd_posix.go:55 +0x28
net.(*conn).Read(0x4001dec5e8, {0x400128b800?, 0x400131a968?, 0x30f588?})
	/usr/local/go/src/net/net.go:189 +0x34
crypto/tls.(*atLeastReader).Read(0x4001f42138, {0x400128b800?, 0x0?, 0x4001f42138?})
	/usr/local/go/src/crypto/tls/conn.go:809 +0x40
bytes.(*Buffer).ReadFrom(0x40013a50b8, {0x31d5f00, 0x4001f42138})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
crypto/tls.(*Conn).readFromUntil(0x40013a4e08, {0xffff591b8bb8, 0x4001f43740}, 0x0?)
	/usr/local/go/src/crypto/tls/conn.go:831 +0xd0
crypto/tls.(*Conn).readRecordOrCCS(0x40013a4e08, 0x0)
	/usr/local/go/src/crypto/tls/conn.go:629 +0x35c
crypto/tls.(*Conn).readRecord(...)
	/usr/local/go/src/crypto/tls/conn.go:591
crypto/tls.(*Conn).Read(0x40013a4e08, {0x4001602000, 0x1000, 0x5a2ba8?})
	/usr/local/go/src/crypto/tls/conn.go:1385 +0x164
bufio.(*Reader).Read(0x40018ab7a0, {0x400072b540, 0x9, 0x478c540?})
	/usr/local/go/src/bufio/bufio.go:241 +0x1b4
io.ReadAtLeast({0x31d4200, 0x40018ab7a0}, {0x400072b540, 0x9, 0x9}, 0x9)
	/usr/local/go/src/io/io.go:335 +0xa0
io.ReadFull(...)
	/usr/local/go/src/io/io.go:354
golang.org/x/net/http2.readFrameHeader({0x400072b540, 0x9, 0x162e7f0?}, {0x31d4200?, 0x40018ab7a0?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:237 +0x58
golang.org/x/net/http2.(*Framer).ReadFrame(0x400072b500)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:501 +0x78
golang.org/x/net/http2.(*clientConnReadLoop).run(0x400131af98)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2354 +0xd0
golang.org/x/net/http2.(*ClientConn).readLoop(0x4000b61380)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2250 +0x78
created by golang.org/x/net/http2.(*Transport).newClientConn in goroutine 4870
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:865 +0xad0

goroutine 2144 [chan receive, 16 minutes]:
testing.(*T).Run(0x4000697520, {0x239ee85?, 0x31cbd58?}, 0x40014a26c0)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4000697520)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:148 +0x778
testing.tRunner(0x4000697520, 0x4001f5c580)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2059
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 2630 [chan receive, 50 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40004e8f80, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2628
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

goroutine 2143 [chan receive, 16 minutes]:
testing.(*T).Run(0x4000696ea0, {0x239ee85?, 0x31cbd58?}, 0x4001ac9a10)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4000696ea0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:148 +0x778
testing.tRunner(0x4000696ea0, 0x4001f5c500)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2059
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 581 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 580
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

goroutine 580 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x4001313f40, 0x4001321f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0xe8?, 0x4001313f40, 0x4001313f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x0?, 0x0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4002379200?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 600
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 2397 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2396
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

goroutine 579 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x400149b490, 0x2b)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x400149b480)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x400149b4c0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400085bd10, {0x31d56c0, 0x4001f32c30}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400085bd10, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 600
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

goroutine 2112 [chan receive, 69 minutes]:
testing.(*testContext).waitParallel(0x40006e0690)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400014e4e0)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400014e4e0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400014e4e0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400014e4e0, 0x40004e1280)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2059
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 2111 [chan receive, 69 minutes]:
testing.(*testContext).waitParallel(0x40006e0690)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400014e340)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400014e340)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400014e340)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400014e340, 0x40004e1180)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2059
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 2767 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0x400149b410, 0x19)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x400149b400)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x400149b500)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4001acab40, {0x31d56c0, 0x40014a3770}, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4001acab40, 0x3b9aca00, 0x0, 0x1, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2778
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

goroutine 726 [chan send, 103 minutes]:
os/exec.(*Cmd).watchCtx(0x40001ff980, 0x40019485b0)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 725
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

goroutine 4856 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x40013acf40, 0x40013acf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x40?, 0x40013acf40, 0x40013acf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x40014a9e00?, 0x40014e57c0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4001860180?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 4852
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 2804 [chan receive, 47 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40013da400, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2802
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

goroutine 4884 [select]:
k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext({0x31fc998, 0x40004ae700}, {0x31efda0, 0x4001499180}, 0x1, 0x0, 0x40012b5bd0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/loop.go:66 +0x160
k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout({0x31fc998?, 0x40004af490?}, 0x3b9aca00, 0x40012d7dc8?, 0x1, 0x40012d7bd0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:48 +0x8c
k8s.io/minikube/test/integration.PodWait({0x31fc998, 0x40004af490}, 0x4000697860, {0x4001f17980, 0x15}, {0x2399e9a, 0x7}, {0x23a10fa, 0xa}, 0xd18c2e2800)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:371 +0x24c
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.4(0x4000697860)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:163 +0x2dc
testing.tRunner(0x4000697860, 0x4001ac9a10)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2143
	/usr/local/go/src/testing/testing.go:1743 +0x314

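Goroutine 4884 above (and 4845 below) are the two still-running NetCatPod subtests, polling for a pod via PodWait; the 0xd18c2e2800 argument in the frame is 900,000,000,000 ns, i.e. a 15-minute timeout. A minimal sketch of that polling shape, assuming a built clientset and a hypothetical label selector (the real arguments live in helpers_test.go):

	package sketch

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// waitForRunningPod polls once per second until a matching pod is
	// Running or the 15-minute timeout expires, mirroring the
	// PollUntilContextTimeout frame in the stacks above.
	func waitForRunningPod(ctx context.Context, client kubernetes.Interface, ns, selector string) error {
		return wait.PollUntilContextTimeout(ctx, time.Second, 15*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				pods, err := client.CoreV1().Pods(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
				if err != nil {
					return false, nil // treat API errors as transient; keep polling
				}
				for _, p := range pods.Items {
					if p.Status.Phase == corev1.PodRunning {
						return true, nil
					}
				}
				return false, nil
			})
	}
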
goroutine 4842 [IO wait]:
internal/poll.runtime_pollWait(0xffff595daa68, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x40013b6d80?, 0x40012c5500?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x40013b6d80, {0x40012c5500, 0x5500, 0x5500})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
net.(*netFD).Read(0x40013b6d80, {0x40012c5500?, 0x18?, 0x10?})
	/usr/local/go/src/net/fd_posix.go:55 +0x28
net.(*conn).Read(0x4001dec210, {0x40012c5500?, 0x400130e968?, 0x30f588?})
	/usr/local/go/src/net/net.go:189 +0x34
crypto/tls.(*atLeastReader).Read(0x4001f420d8, {0x40012c5500?, 0x0?, 0x4001f420d8?})
	/usr/local/go/src/crypto/tls/conn.go:809 +0x40
bytes.(*Buffer).ReadFrom(0x40006c8638, {0x31d5f00, 0x4001f420d8})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
crypto/tls.(*Conn).readFromUntil(0x40006c8388, {0xffff591b8bb8, 0x400186e0f0}, 0x0?)
	/usr/local/go/src/crypto/tls/conn.go:831 +0xd0
crypto/tls.(*Conn).readRecordOrCCS(0x40006c8388, 0x0)
	/usr/local/go/src/crypto/tls/conn.go:629 +0x35c
crypto/tls.(*Conn).readRecord(...)
	/usr/local/go/src/crypto/tls/conn.go:591
crypto/tls.(*Conn).Read(0x40006c8388, {0x400235a000, 0x1000, 0x5a2ba8?})
	/usr/local/go/src/crypto/tls/conn.go:1385 +0x164
bufio.(*Reader).Read(0x400165f680, {0x40015d82e0, 0x9, 0x478c540?})
	/usr/local/go/src/bufio/bufio.go:241 +0x1b4
io.ReadAtLeast({0x31d4200, 0x400165f680}, {0x40015d82e0, 0x9, 0x9}, 0x9)
	/usr/local/go/src/io/io.go:335 +0xa0
io.ReadFull(...)
	/usr/local/go/src/io/io.go:354
golang.org/x/net/http2.readFrameHeader({0x40015d82e0, 0x9, 0x1852610?}, {0x31d4200?, 0x400165f680?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:237 +0x58
golang.org/x/net/http2.(*Framer).ReadFrame(0x40015d82a0)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:501 +0x78
golang.org/x/net/http2.(*clientConnReadLoop).run(0x400130ef98)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2354 +0xd0
golang.org/x/net/http2.(*ClientConn).readLoop(0x40001fa000)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2250 +0x78
created by golang.org/x/net/http2.(*Transport).newClientConn in goroutine 4841
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:865 +0xad0

goroutine 4845 [select]:
k8s.io/apimachinery/pkg/util/wait.loopConditionUntilContext({0x31fc998, 0x4000558d90}, {0x31efda0, 0x400014a300}, 0x1, 0x0, 0x40012bfbd0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/loop.go:66 +0x160
k8s.io/apimachinery/pkg/util/wait.PollUntilContextTimeout({0x31fc998?, 0x40004af500?}, 0x3b9aca00, 0x40012bfdc8?, 0x1, 0x40012bfbd0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:48 +0x8c
k8s.io/minikube/test/integration.PodWait({0x31fc998, 0x40004af500}, 0x40006969c0, {0x40016b2c50, 0xd}, {0x2399e9a, 0x7}, {0x23a10fa, 0xa}, 0xd18c2e2800)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:371 +0x24c
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.4(0x40006969c0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:163 +0x2dc
testing.tRunner(0x40006969c0, 0x40014a26c0)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2144
	/usr/local/go/src/testing/testing.go:1743 +0x314

goroutine 4868 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x4000102230}, 0x4000081740, 0x40013abf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x4000102230}, 0x88?, 0x4000081740, 0x4000081788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x4000102230?}, 0x4000b61080?, 0x4000112640?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4002378780?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 4886
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

goroutine 4886 [chan receive, 16 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x400160d180, 0x4000102230)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 4884
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

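The dump ends here. A dump like this lists every goroutine in the test binary and is what the Go runtime prints when the binary panics, commonly in CI runs because the overall -timeout expired. An equivalent snapshot can be taken on demand with runtime.Stack, for example:

	package sketch

	import (
		"os"
		"runtime"
	)

	// dumpAllGoroutines writes the stacks of all goroutines to stderr in
	// the same format as the dump above (all=true includes every
	// goroutine, not just the caller's).
	func dumpAllGoroutines() {
		buf := make([]byte, 1<<20) // 1 MiB is usually enough; grow if truncated
		n := runtime.Stack(buf, true)
		os.Stderr.Write(buf[:n])
	}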

Test pass (167/229)

Order passed test Duration
3 TestDownloadOnly/v1.20.0/json-events 6.74
4 TestDownloadOnly/v1.20.0/preload-exists 0
8 TestDownloadOnly/v1.20.0/LogsDuration 0.07
9 TestDownloadOnly/v1.20.0/DeleteAll 0.19
10 TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds 0.13
12 TestDownloadOnly/v1.31.1/json-events 5.11
13 TestDownloadOnly/v1.31.1/preload-exists 0
17 TestDownloadOnly/v1.31.1/LogsDuration 0.07
18 TestDownloadOnly/v1.31.1/DeleteAll 0.21
19 TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds 0.14
21 TestBinaryMirror 0.61
25 TestAddons/PreSetup/EnablingAddonOnNonExistingCluster 0.06
26 TestAddons/PreSetup/DisablingAddonOnNonExistingCluster 0.07
27 TestAddons/Setup 228.85
35 TestAddons/parallel/InspektorGadget 10.99
40 TestAddons/parallel/Headlamp 16.75
41 TestAddons/parallel/CloudSpanner 6.75
43 TestAddons/parallel/NvidiaDevicePlugin 6.53
44 TestAddons/parallel/Yakd 10.77
45 TestAddons/StoppedEnableDisable 12.25
47 TestCertExpiration 252.8
49 TestForceSystemdFlag 36.73
50 TestForceSystemdEnv 43.39
56 TestErrorSpam/setup 32.6
57 TestErrorSpam/start 0.74
58 TestErrorSpam/status 1.02
59 TestErrorSpam/pause 1.81
60 TestErrorSpam/unpause 1.85
61 TestErrorSpam/stop 1.46
64 TestFunctional/serial/CopySyncFile 0
65 TestFunctional/serial/StartWithProxy 76.44
66 TestFunctional/serial/AuditLog 0
67 TestFunctional/serial/SoftStart 29.09
72 TestFunctional/serial/CacheCmd/cache/add_remote 4.33
73 TestFunctional/serial/CacheCmd/cache/add_local 1.52
74 TestFunctional/serial/CacheCmd/cache/CacheDelete 0.05
75 TestFunctional/serial/CacheCmd/cache/list 0.05
76 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 0.32
77 TestFunctional/serial/CacheCmd/cache/cache_reload 2.17
78 TestFunctional/serial/CacheCmd/cache/delete 0.13
79 TestFunctional/serial/MinikubeKubectlCmd 0.15
80 TestFunctional/serial/MinikubeKubectlCmdDirectly 0.14
81 TestFunctional/serial/ExtraConfig 35.7
83 TestFunctional/serial/LogsCmd 1.74
84 TestFunctional/serial/LogsFileCmd 1.83
87 TestFunctional/parallel/ConfigCmd 0.46
89 TestFunctional/parallel/DryRun 0.66
90 TestFunctional/parallel/InternationalLanguage 0.24
91 TestFunctional/parallel/StatusCmd 1.63
96 TestFunctional/parallel/AddonsCmd 0.14
99 TestFunctional/parallel/SSHCmd 0.73
100 TestFunctional/parallel/CpCmd 2.24
102 TestFunctional/parallel/FileSync 0.36
103 TestFunctional/parallel/CertSync 2.4
109 TestFunctional/parallel/NonActiveRuntimeDisabled 0.79
111 TestFunctional/parallel/License 0.25
113 TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel 0.68
114 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0
121 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
128 TestFunctional/parallel/ProfileCmd/profile_not_create 0.55
129 TestFunctional/parallel/ProfileCmd/profile_list 0.48
130 TestFunctional/parallel/ProfileCmd/profile_json_output 0.48
132 TestFunctional/parallel/MountCmd/specific-port 2.02
133 TestFunctional/parallel/MountCmd/VerifyCleanup 3.09
134 TestFunctional/parallel/Version/short 0.08
135 TestFunctional/parallel/Version/components 1.01
136 TestFunctional/parallel/ImageCommands/ImageListShort 0.28
137 TestFunctional/parallel/ImageCommands/ImageListTable 0.26
138 TestFunctional/parallel/ImageCommands/ImageListJson 0.27
139 TestFunctional/parallel/ImageCommands/ImageListYaml 0.3
140 TestFunctional/parallel/ImageCommands/ImageBuild 3.74
141 TestFunctional/parallel/ImageCommands/Setup 0.69
142 TestFunctional/parallel/ImageCommands/ImageLoadDaemon 4.7
143 TestFunctional/parallel/ImageCommands/ImageReloadDaemon 1.48
144 TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon 1.43
145 TestFunctional/parallel/UpdateContextCmd/no_changes 0.21
146 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 0.18
147 TestFunctional/parallel/UpdateContextCmd/no_clusters 0.23
148 TestFunctional/parallel/ImageCommands/ImageSaveToFile 0.58
149 TestFunctional/parallel/ImageCommands/ImageRemove 0.67
150 TestFunctional/parallel/ImageCommands/ImageLoadFromFile 0.99
151 TestFunctional/parallel/ImageCommands/ImageSaveDaemon 0.58
152 TestFunctional/delete_echo-server_images 0.04
153 TestFunctional/delete_my-image_image 0.02
154 TestFunctional/delete_minikube_cached_images 0.02
158 TestMultiControlPlane/serial/StartCluster 171.3
159 TestMultiControlPlane/serial/DeployApp 10.85
160 TestMultiControlPlane/serial/PingHostFromPods 1.6
161 TestMultiControlPlane/serial/AddWorkerNode 63.12
163 TestMultiControlPlane/serial/HAppyAfterClusterStart 0.78
164 TestMultiControlPlane/serial/CopyFile 19.14
165 TestMultiControlPlane/serial/StopSecondaryNode 12.76
166 TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop 0.57
168 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart 3.31
169 TestMultiControlPlane/serial/RestartClusterKeepsNodes 188.27
171 TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete 0.56
172 TestMultiControlPlane/serial/StopCluster 35.89
174 TestMultiControlPlane/serial/DegradedAfterClusterRestart 0.54
175 TestMultiControlPlane/serial/AddSecondaryNode 71.83
176 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd 0.75
180 TestJSONOutput/start/Command 75.59
181 TestJSONOutput/start/Audit 0
183 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
184 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
186 TestJSONOutput/pause/Command 0.75
187 TestJSONOutput/pause/Audit 0
189 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
190 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
192 TestJSONOutput/unpause/Command 0.66
193 TestJSONOutput/unpause/Audit 0
195 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
196 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
198 TestJSONOutput/stop/Command 6.12
199 TestJSONOutput/stop/Audit 0
201 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
202 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
203 TestErrorJSONOutput 0.23
205 TestKicCustomNetwork/create_custom_network 38.37
206 TestKicCustomNetwork/use_default_bridge_network 32.52
207 TestKicExistingNetwork 31.14
208 TestKicCustomSubnet 38.29
209 TestKicStaticIP 36.59
210 TestMainNoArgs 0.05
211 TestMinikubeProfile 68.89
214 TestMountStart/serial/StartWithMountFirst 7.29
215 TestMountStart/serial/VerifyMountFirst 0.27
216 TestMountStart/serial/StartWithMountSecond 6.64
217 TestMountStart/serial/VerifyMountSecond 0.26
218 TestMountStart/serial/DeleteFirst 1.61
219 TestMountStart/serial/VerifyMountPostDelete 0.26
220 TestMountStart/serial/Stop 1.21
221 TestMountStart/serial/RestartStopped 8.55
222 TestMountStart/serial/VerifyMountPostStop 0.26
225 TestMultiNode/serial/FreshStart2Nodes 108.01
226 TestMultiNode/serial/DeployApp2Nodes 6.39
227 TestMultiNode/serial/PingHostFrom2Pods 1
228 TestMultiNode/serial/AddNode 29.2
230 TestMultiNode/serial/ProfileList 0.34
231 TestMultiNode/serial/CopyFile 10.2
232 TestMultiNode/serial/StopNode 2.34
234 TestMultiNode/serial/RestartKeepsNodes 137.88
236 TestMultiNode/serial/StopMultiNode 23.97
238 TestMultiNode/serial/ValidateNameConflict 38.83
245 TestScheduledStopUnix 107.66
248 TestInsufficientStorage 10.75
249 TestRunningBinaryUpgrade 66.46
252 TestMissingContainerUpgrade 195.04
254 TestPause/serial/Start 88.37
256 TestNoKubernetes/serial/StartNoK8sWithVersion 0.12
257 TestNoKubernetes/serial/StartWithK8s 42
258 TestNoKubernetes/serial/StartWithStopK8s 7.8
259 TestNoKubernetes/serial/Start 6.36
260 TestNoKubernetes/serial/VerifyK8sNotRunning 0.28
261 TestNoKubernetes/serial/ProfileList 0.96
262 TestNoKubernetes/serial/Stop 1.21
263 TestNoKubernetes/serial/StartNoArgs 7.66
264 TestNoKubernetes/serial/VerifyK8sNotRunningSecond 0.29
265 TestPause/serial/SecondStartNoReconfiguration 19.61
266 TestPause/serial/Pause 1.11
267 TestPause/serial/VerifyStatus 0.35
268 TestPause/serial/Unpause 0.82
269 TestPause/serial/PauseAgain 0.95
270 TestPause/serial/DeletePaused 2.74
271 TestPause/serial/VerifyDeletedResources 0.14
272 TestStoppedBinaryUpgrade/Setup 0.93
273 TestStoppedBinaryUpgrade/Upgrade 70.27
274 TestStoppedBinaryUpgrade/MinikubeLogs 1.31
TestDownloadOnly/v1.20.0/json-events (6.74s)

=== RUN   TestDownloadOnly/v1.20.0/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-084128 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=crio --driver=docker  --container-runtime=crio
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-084128 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=crio --driver=docker  --container-runtime=crio: (6.743929596s)
--- PASS: TestDownloadOnly/v1.20.0/json-events (6.74s)

TestDownloadOnly/v1.20.0/preload-exists (0s)

=== RUN   TestDownloadOnly/v1.20.0/preload-exists
--- PASS: TestDownloadOnly/v1.20.0/preload-exists (0.00s)

TestDownloadOnly/v1.20.0/LogsDuration (0.07s)

=== RUN   TestDownloadOnly/v1.20.0/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-084128
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-084128: exit status 85 (70.055711ms)

-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      | End Time |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| start   | -o=json --download-only        | download-only-084128 | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |          |
	|         | -p download-only-084128        |                      |         |         |                     |          |
	|         | --force --alsologtostderr      |                      |         |         |                     |          |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |          |
	|         | --container-runtime=crio       |                      |         |         |                     |          |
	|         | --driver=docker                |                      |         |         |                     |          |
	|         | --container-runtime=crio       |                      |         |         |                     |          |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:34:47
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:34:47.349900 1383838 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:34:47.350133 1383838 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:34:47.350160 1383838 out.go:358] Setting ErrFile to fd 2...
	I0916 10:34:47.350183 1383838 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:34:47.350466 1383838 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	W0916 10:34:47.350648 1383838 root.go:314] Error reading config file at /home/jenkins/minikube-integration/19651-1378450/.minikube/config/config.json: open /home/jenkins/minikube-integration/19651-1378450/.minikube/config/config.json: no such file or directory
	I0916 10:34:47.351122 1383838 out.go:352] Setting JSON to true
	I0916 10:34:47.352052 1383838 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37033,"bootTime":1726445855,"procs":159,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:34:47.352186 1383838 start.go:139] virtualization:  
	I0916 10:34:47.354552 1383838 out.go:97] [download-only-084128] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	W0916 10:34:47.354730 1383838 preload.go:293] Failed to list preload files: open /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball: no such file or directory
	I0916 10:34:47.354792 1383838 notify.go:220] Checking for updates...
	I0916 10:34:47.356129 1383838 out.go:169] MINIKUBE_LOCATION=19651
	I0916 10:34:47.357995 1383838 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:34:47.359644 1383838 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:34:47.361002 1383838 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:34:47.362326 1383838 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0916 10:34:47.365110 1383838 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0916 10:34:47.365464 1383838 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:34:47.395678 1383838 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:34:47.395793 1383838 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:34:47.453222 1383838 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:52 SystemTime:2024-09-16 10:34:47.443376808 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:34:47.453342 1383838 docker.go:318] overlay module found
	I0916 10:34:47.454630 1383838 out.go:97] Using the docker driver based on user configuration
	I0916 10:34:47.454653 1383838 start.go:297] selected driver: docker
	I0916 10:34:47.454659 1383838 start.go:901] validating driver "docker" against <nil>
	I0916 10:34:47.454772 1383838 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:34:47.517759 1383838 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:52 SystemTime:2024-09-16 10:34:47.507613231 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:34:47.517979 1383838 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:34:47.518280 1383838 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0916 10:34:47.518441 1383838 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 10:34:47.519921 1383838 out.go:169] Using Docker driver with root privileges
	I0916 10:34:47.521028 1383838 cni.go:84] Creating CNI manager for ""
	I0916 10:34:47.521108 1383838 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:34:47.521124 1383838 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:34:47.521200 1383838 start.go:340] cluster config:
	{Name:download-only-084128 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:download-only-084128 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:34:47.522407 1383838 out.go:97] Starting "download-only-084128" primary control-plane node in "download-only-084128" cluster
	I0916 10:34:47.522455 1383838 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:34:47.523686 1383838 out.go:97] Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:34:47.523721 1383838 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime crio
	I0916 10:34:47.523771 1383838 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:34:47.539903 1383838 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:34:47.540123 1383838 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:34:47.540239 1383838 image.go:148] Writing gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:34:47.607587 1383838 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-cri-o-overlay-arm64.tar.lz4
	I0916 10:34:47.607626 1383838 cache.go:56] Caching tarball of preloaded images
	I0916 10:34:47.608193 1383838 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime crio
	I0916 10:34:47.610397 1383838 out.go:97] Downloading Kubernetes v1.20.0 preload ...
	I0916 10:34:47.610423 1383838 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.20.0-cri-o-overlay-arm64.tar.lz4 ...
	I0916 10:34:47.680808 1383838 download.go:107] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-cri-o-overlay-arm64.tar.lz4?checksum=md5:59cd2ef07b53f039bfd1761b921f2a02 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-cri-o-overlay-arm64.tar.lz4
	
	
	* The control-plane node download-only-084128 host does not exist
	  To start a cluster, run: "minikube start -p download-only-084128"

-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.20.0/LogsDuration (0.07s)
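
The LogsDuration output above traces minikube's preload flow end to end: check whether a preload already exists locally, locate the remote tarball, then download it with a ?checksum=md5: query parameter and verify the digest before caching (preload.go:236, download.go:107). A minimal Go sketch of that verify-after-download pattern; the function and parameter names here are illustrative, not minikube's actual API:

package download

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"net/http"
	"os"
)

// fetchAndVerify downloads url to dest, hashing the stream as it is written,
// and rejects the file if the md5 digest does not match expectedMD5.
func fetchAndVerify(url, dest, expectedMD5 string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	out, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer out.Close()

	h := md5.New()
	// Write the payload to disk and into the hash in one pass.
	if _, err := io.Copy(io.MultiWriter(out, h), resp.Body); err != nil {
		return err
	}
	if got := hex.EncodeToString(h.Sum(nil)); got != expectedMD5 {
		return fmt.Errorf("checksum mismatch: got %s, want %s", got, expectedMD5)
	}
	return nil
}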

                                                
                                    
TestDownloadOnly/v1.20.0/DeleteAll (0.19s)

=== RUN   TestDownloadOnly/v1.20.0/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.20.0/DeleteAll (0.19s)

TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

=== RUN   TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-084128
--- PASS: TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

TestDownloadOnly/v1.31.1/json-events (5.11s)

=== RUN   TestDownloadOnly/v1.31.1/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-605096 --force --alsologtostderr --kubernetes-version=v1.31.1 --container-runtime=crio --driver=docker  --container-runtime=crio
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-605096 --force --alsologtostderr --kubernetes-version=v1.31.1 --container-runtime=crio --driver=docker  --container-runtime=crio: (5.105263953s)
--- PASS: TestDownloadOnly/v1.31.1/json-events (5.11s)

TestDownloadOnly/v1.31.1/preload-exists (0s)

=== RUN   TestDownloadOnly/v1.31.1/preload-exists
--- PASS: TestDownloadOnly/v1.31.1/preload-exists (0.00s)

TestDownloadOnly/v1.31.1/LogsDuration (0.07s)

=== RUN   TestDownloadOnly/v1.31.1/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-605096
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-605096: exit status 85 (73.57802ms)

-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only        | download-only-084128 | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-084128        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |                     |
	|         | --container-runtime=crio       |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=crio       |                      |         |         |                     |                     |
	| delete  | --all                          | minikube             | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| delete  | -p download-only-084128        | download-only-084128 | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC | 16 Sep 24 10:34 UTC |
	| start   | -o=json --download-only        | download-only-605096 | jenkins | v1.34.0 | 16 Sep 24 10:34 UTC |                     |
	|         | -p download-only-605096        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1   |                      |         |         |                     |                     |
	|         | --container-runtime=crio       |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=crio       |                      |         |         |                     |                     |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:34:54
	Running on machine: ip-172-31-21-244
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:34:54.483948 1384037 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:34:54.484091 1384037 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:34:54.484101 1384037 out.go:358] Setting ErrFile to fd 2...
	I0916 10:34:54.484105 1384037 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:34:54.484337 1384037 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:34:54.484777 1384037 out.go:352] Setting JSON to true
	I0916 10:34:54.485594 1384037 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":37040,"bootTime":1726445855,"procs":155,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:34:54.485664 1384037 start.go:139] virtualization:  
	I0916 10:34:54.487319 1384037 out.go:97] [download-only-605096] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:34:54.487550 1384037 notify.go:220] Checking for updates...
	I0916 10:34:54.488662 1384037 out.go:169] MINIKUBE_LOCATION=19651
	I0916 10:34:54.490378 1384037 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:34:54.491578 1384037 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:34:54.492765 1384037 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:34:54.493999 1384037 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0916 10:34:54.496260 1384037 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0916 10:34:54.496500 1384037 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:34:54.517651 1384037 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:34:54.517797 1384037 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:34:54.579733 1384037 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:45 SystemTime:2024-09-16 10:34:54.570426484 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:34:54.579850 1384037 docker.go:318] overlay module found
	I0916 10:34:54.581116 1384037 out.go:97] Using the docker driver based on user configuration
	I0916 10:34:54.581149 1384037 start.go:297] selected driver: docker
	I0916 10:34:54.581157 1384037 start.go:901] validating driver "docker" against <nil>
	I0916 10:34:54.581270 1384037 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:34:54.636365 1384037 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:45 SystemTime:2024-09-16 10:34:54.626843078 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:34:54.636627 1384037 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:34:54.636978 1384037 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0916 10:34:54.637140 1384037 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 10:34:54.638660 1384037 out.go:169] Using Docker driver with root privileges
	I0916 10:34:54.639679 1384037 cni.go:84] Creating CNI manager for ""
	I0916 10:34:54.639742 1384037 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
	I0916 10:34:54.639759 1384037 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:34:54.639838 1384037 start.go:340] cluster config:
	{Name:download-only-605096 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:download-only-605096 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:34:54.641191 1384037 out.go:97] Starting "download-only-605096" primary control-plane node in "download-only-605096" cluster
	I0916 10:34:54.641218 1384037 cache.go:121] Beginning downloading kic base image for docker with crio
	I0916 10:34:54.642561 1384037 out.go:97] Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:34:54.642594 1384037 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:34:54.642774 1384037 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:34:54.658312 1384037 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:34:54.658440 1384037 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:34:54.658469 1384037 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:34:54.658478 1384037 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:34:54.658486 1384037 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:34:54.696961 1384037 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.31.1/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:34:54.697002 1384037 cache.go:56] Caching tarball of preloaded images
	I0916 10:34:54.697172 1384037 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:34:54.699097 1384037 out.go:97] Downloading Kubernetes v1.31.1 preload ...
	I0916 10:34:54.699119 1384037 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 ...
	I0916 10:34:54.770364 1384037 download.go:107] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.31.1/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4?checksum=md5:8285fc512c7462f100de137f91fcd0ae -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4
	I0916 10:34:58.058934 1384037 preload.go:247] saving checksum for preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 ...
	I0916 10:34:58.059041 1384037 preload.go:254] verifying checksum of /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-cri-o-overlay-arm64.tar.lz4 ...
	I0916 10:34:58.923803 1384037 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on crio
	I0916 10:34:58.924199 1384037 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/download-only-605096/config.json ...
	I0916 10:34:58.924234 1384037 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/download-only-605096/config.json: {Name:mk6cff4fd61d8a3defb43ca9b793b194c85c25f4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:34:58.924827 1384037 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime crio
	I0916 10:34:58.925004 1384037 download.go:107] Downloading: https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl?checksum=file:https://dl.k8s.io/release/v1.31.1/bin/linux/arm64/kubectl.sha256 -> /home/jenkins/minikube-integration/19651-1378450/.minikube/cache/linux/arm64/v1.31.1/kubectl
	
	
	* The control-plane node download-only-605096 host does not exist
	  To start a cluster, run: "minikube start -p download-only-605096"

-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.31.1/LogsDuration (0.07s)
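
The image.go lines in the output above show the layered lookup minikube performs for the kic base image: first the local docker daemon, then the on-disk cache directory, and only on a double miss does it pull. A dependency-free sketch of that fallback chain; ensureBaseImage and its helper parameters are hypothetical names for illustration:

package kicbase

import "log"

// ensureBaseImage resolves an image reference against two local layers before
// falling back to a network pull, mirroring the daemon -> cache -> pull order
// in the log above.
func ensureBaseImage(ref string, inDaemon, inCache func(string) bool, pull func(string) error) error {
	if inDaemon(ref) {
		log.Printf("%s found in local docker daemon, skipping pull", ref)
		return nil
	}
	if inCache(ref) {
		log.Printf("%s exists in cache, skipping pull", ref)
		return nil
	}
	return pull(ref) // both layers missed: download and populate the cache
}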

                                                
                                    
TestDownloadOnly/v1.31.1/DeleteAll (0.21s)

=== RUN   TestDownloadOnly/v1.31.1/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.31.1/DeleteAll (0.21s)

TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds (0.14s)

=== RUN   TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-605096
--- PASS: TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds (0.14s)

TestBinaryMirror (0.61s)

=== RUN   TestBinaryMirror
aaa_download_only_test.go:314: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p binary-mirror-652159 --alsologtostderr --binary-mirror http://127.0.0.1:40363 --driver=docker  --container-runtime=crio
helpers_test.go:175: Cleaning up "binary-mirror-652159" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p binary-mirror-652159
--- PASS: TestBinaryMirror (0.61s)

TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.06s)

=== RUN   TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/EnablingAddonOnNonExistingCluster

=== CONT  TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
addons_test.go:1037: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-936355
addons_test.go:1037: (dbg) Non-zero exit: out/minikube-linux-arm64 addons enable dashboard -p addons-936355: exit status 85 (63.34178ms)

-- stdout --
	* Profile "addons-936355" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-936355"

-- /stdout --
--- PASS: TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.06s)
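
This PreSetup test passes because the command under test fails in exactly the expected way: exit status 85 when the target profile does not exist. A sketch of how a Go test can pin a specific exit code with os/exec; the test name and profile name here are made-up placeholders:

package cli_test

import (
	"errors"
	"os/exec"
	"testing"
)

func TestAddonCommandOnMissingProfile(t *testing.T) {
	cmd := exec.Command("out/minikube-linux-arm64", "addons", "enable", "dashboard", "-p", "no-such-profile")
	err := cmd.Run()

	// A missing profile must surface as a process exit, not some other error.
	var exitErr *exec.ExitError
	if !errors.As(err, &exitErr) {
		t.Fatalf("expected a non-zero exit, got %v", err)
	}
	// The missing-profile path is expected to exit with status 85.
	if code := exitErr.ExitCode(); code != 85 {
		t.Fatalf("expected exit status 85, got %d", code)
	}
}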

                                                
                                    
TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

=== RUN   TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/DisablingAddonOnNonExistingCluster

=== CONT  TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
addons_test.go:1048: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-936355
addons_test.go:1048: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable dashboard -p addons-936355: exit status 85 (72.470088ms)

-- stdout --
	* Profile "addons-936355" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-936355"

-- /stdout --
--- PASS: TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

TestAddons/Setup (228.85s)

=== RUN   TestAddons/Setup
addons_test.go:110: (dbg) Run:  out/minikube-linux-arm64 start -p addons-936355 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=crio --addons=ingress --addons=ingress-dns
addons_test.go:110: (dbg) Done: out/minikube-linux-arm64 start -p addons-936355 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=crio --addons=ingress --addons=ingress-dns: (3m48.843277615s)
--- PASS: TestAddons/Setup (228.85s)

TestAddons/parallel/InspektorGadget (10.99s)

=== RUN   TestAddons/parallel/InspektorGadget
=== PAUSE TestAddons/parallel/InspektorGadget

=== CONT  TestAddons/parallel/InspektorGadget
addons_test.go:848: (dbg) TestAddons/parallel/InspektorGadget: waiting 8m0s for pods matching "k8s-app=gadget" in namespace "gadget" ...
helpers_test.go:344: "gadget-hx2qq" [fb6217d4-dbed-40c2-b47e-4342cb3f94b1] Running / Ready:ContainersNotReady (containers with unready status: [gadget]) / ContainersReady:ContainersNotReady (containers with unready status: [gadget])
addons_test.go:848: (dbg) TestAddons/parallel/InspektorGadget: k8s-app=gadget healthy within 5.01901549s
addons_test.go:851: (dbg) Run:  out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-936355
addons_test.go:851: (dbg) Done: out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-936355: (5.972818197s)
--- PASS: TestAddons/parallel/InspektorGadget (10.99s)
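
The "waiting 8m0s for pods matching ..." / "healthy within ..." pairs in these addon tests come from a poll loop: evaluate a readiness condition on a fixed interval until it holds or a deadline expires. A dependency-free sketch of that pattern (waitFor is an illustrative helper, not the one in helpers_test.go):

package waitutil

import (
	"context"
	"fmt"
	"time"
)

// waitFor polls healthy() every interval until it returns true or timeout
// elapses, whichever comes first.
func waitFor(ctx context.Context, interval, timeout time.Duration, healthy func() bool) error {
	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		if healthy() {
			return nil
		}
		select {
		case <-ctx.Done():
			return fmt.Errorf("condition not met within %s", timeout)
		case <-ticker.C:
		}
	}
}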

                                                
                                    
TestAddons/parallel/Headlamp (16.75s)

=== RUN   TestAddons/parallel/Headlamp
=== PAUSE TestAddons/parallel/Headlamp

=== CONT  TestAddons/parallel/Headlamp
addons_test.go:830: (dbg) Run:  out/minikube-linux-arm64 addons enable headlamp -p addons-936355 --alsologtostderr -v=1
addons_test.go:835: (dbg) TestAddons/parallel/Headlamp: waiting 8m0s for pods matching "app.kubernetes.io/name=headlamp" in namespace "headlamp" ...
helpers_test.go:344: "headlamp-57fb76fcdb-pqjqt" [076e515c-e543-4b89-812b-d4bd406522c2] Pending
helpers_test.go:344: "headlamp-57fb76fcdb-pqjqt" [076e515c-e543-4b89-812b-d4bd406522c2] Pending / Ready:ContainersNotReady (containers with unready status: [headlamp]) / ContainersReady:ContainersNotReady (containers with unready status: [headlamp])
helpers_test.go:344: "headlamp-57fb76fcdb-pqjqt" [076e515c-e543-4b89-812b-d4bd406522c2] Running
addons_test.go:835: (dbg) TestAddons/parallel/Headlamp: app.kubernetes.io/name=headlamp healthy within 10.004016957s
addons_test.go:839: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 addons disable headlamp --alsologtostderr -v=1
addons_test.go:839: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 addons disable headlamp --alsologtostderr -v=1: (5.777612739s)
--- PASS: TestAddons/parallel/Headlamp (16.75s)

TestAddons/parallel/CloudSpanner (6.75s)

=== RUN   TestAddons/parallel/CloudSpanner
=== PAUSE TestAddons/parallel/CloudSpanner

=== CONT  TestAddons/parallel/CloudSpanner
addons_test.go:867: (dbg) TestAddons/parallel/CloudSpanner: waiting 6m0s for pods matching "app=cloud-spanner-emulator" in namespace "default" ...
helpers_test.go:344: "cloud-spanner-emulator-769b77f747-qvhhc" [6829e83f-9815-439b-bac9-1d8f63be6a7f] Running
addons_test.go:867: (dbg) TestAddons/parallel/CloudSpanner: app=cloud-spanner-emulator healthy within 6.004606862s
addons_test.go:870: (dbg) Run:  out/minikube-linux-arm64 addons disable cloud-spanner -p addons-936355
--- PASS: TestAddons/parallel/CloudSpanner (6.75s)

TestAddons/parallel/NvidiaDevicePlugin (6.53s)

=== RUN   TestAddons/parallel/NvidiaDevicePlugin
=== PAUSE TestAddons/parallel/NvidiaDevicePlugin

=== CONT  TestAddons/parallel/NvidiaDevicePlugin
addons_test.go:1061: (dbg) TestAddons/parallel/NvidiaDevicePlugin: waiting 6m0s for pods matching "name=nvidia-device-plugin-ds" in namespace "kube-system" ...
helpers_test.go:344: "nvidia-device-plugin-daemonset-6j9gc" [7ee6aa38-6656-4e60-bd4b-f35c0299acea] Running
addons_test.go:1061: (dbg) TestAddons/parallel/NvidiaDevicePlugin: name=nvidia-device-plugin-ds healthy within 6.003713104s
addons_test.go:1064: (dbg) Run:  out/minikube-linux-arm64 addons disable nvidia-device-plugin -p addons-936355
--- PASS: TestAddons/parallel/NvidiaDevicePlugin (6.53s)

TestAddons/parallel/Yakd (10.77s)

=== RUN   TestAddons/parallel/Yakd
=== PAUSE TestAddons/parallel/Yakd

=== CONT  TestAddons/parallel/Yakd
addons_test.go:1072: (dbg) TestAddons/parallel/Yakd: waiting 2m0s for pods matching "app.kubernetes.io/name=yakd-dashboard" in namespace "yakd-dashboard" ...
helpers_test.go:344: "yakd-dashboard-67d98fc6b-ztsj8" [1a4d121e-2e4c-4e21-89ed-2de3d1af95ff] Running
addons_test.go:1072: (dbg) TestAddons/parallel/Yakd: app.kubernetes.io/name=yakd-dashboard healthy within 5.00561192s
addons_test.go:1076: (dbg) Run:  out/minikube-linux-arm64 -p addons-936355 addons disable yakd --alsologtostderr -v=1
addons_test.go:1076: (dbg) Done: out/minikube-linux-arm64 -p addons-936355 addons disable yakd --alsologtostderr -v=1: (5.759594998s)
--- PASS: TestAddons/parallel/Yakd (10.77s)

TestAddons/StoppedEnableDisable (12.25s)

=== RUN   TestAddons/StoppedEnableDisable
addons_test.go:174: (dbg) Run:  out/minikube-linux-arm64 stop -p addons-936355
addons_test.go:174: (dbg) Done: out/minikube-linux-arm64 stop -p addons-936355: (11.970383823s)
addons_test.go:178: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-936355
addons_test.go:182: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-936355
addons_test.go:187: (dbg) Run:  out/minikube-linux-arm64 addons disable gvisor -p addons-936355
--- PASS: TestAddons/StoppedEnableDisable (12.25s)

TestCertExpiration (252.8s)

=== RUN   TestCertExpiration
=== PAUSE TestCertExpiration

=== CONT  TestCertExpiration
cert_options_test.go:123: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-258290 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=crio
cert_options_test.go:123: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-258290 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=crio: (40.186534863s)
cert_options_test.go:131: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-258290 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=crio
cert_options_test.go:131: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-258290 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=crio: (30.23832494s)
helpers_test.go:175: Cleaning up "cert-expiration-258290" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-expiration-258290
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-expiration-258290: (2.378890125s)
--- PASS: TestCertExpiration (252.80s)
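
TestCertExpiration drives the --cert-expiration flag from 3m up to 8760h (one year); what ultimately changes is the NotAfter field of the generated certificates. A small sketch of inspecting that field with crypto/x509; the PEM path parameter is an assumption for illustration:

package certcheck

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

// expiresWithin reports whether the certificate at pemPath expires inside the
// given window from now.
func expiresWithin(pemPath string, window time.Duration) (bool, error) {
	data, err := os.ReadFile(pemPath)
	if err != nil {
		return false, err
	}
	block, _ := pem.Decode(data)
	if block == nil {
		return false, fmt.Errorf("no PEM block in %s", pemPath)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return false, err
	}
	return time.Now().Add(window).After(cert.NotAfter), nil
}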

                                                
                                    
TestForceSystemdFlag (36.73s)

=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag

=== CONT  TestForceSystemdFlag
docker_test.go:91: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-flag-426212 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=crio
docker_test.go:91: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-flag-426212 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=crio: (33.86380334s)
docker_test.go:132: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-flag-426212 ssh "cat /etc/crio/crio.conf.d/02-crio.conf"
helpers_test.go:175: Cleaning up "force-systemd-flag-426212" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-flag-426212
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-flag-426212: (2.448225169s)
--- PASS: TestForceSystemdFlag (36.73s)

TestForceSystemdEnv (43.39s)

=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv

=== CONT  TestForceSystemdEnv
docker_test.go:155: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-env-541584 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=crio
E0916 11:27:20.563688 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
docker_test.go:155: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-env-541584 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=crio: (40.855058299s)
helpers_test.go:175: Cleaning up "force-systemd-env-541584" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-env-541584
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-env-541584: (2.539340222s)
--- PASS: TestForceSystemdEnv (43.39s)

TestErrorSpam/setup (32.6s)

=== RUN   TestErrorSpam/setup
error_spam_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -p nospam-329014 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-329014 --driver=docker  --container-runtime=crio
error_spam_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -p nospam-329014 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-329014 --driver=docker  --container-runtime=crio: (32.597498663s)
error_spam_test.go:91: acceptable stderr: "E0916 10:46:22.959035 1395349 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error"
--- PASS: TestErrorSpam/setup (32.60s)

TestErrorSpam/start (0.74s)

=== RUN   TestErrorSpam/start
error_spam_test.go:216: Cleaning up 1 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 start --dry-run
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 start --dry-run
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 start --dry-run
--- PASS: TestErrorSpam/start (0.74s)

TestErrorSpam/status (1.02s)

=== RUN   TestErrorSpam/status
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 status
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 status
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 status
--- PASS: TestErrorSpam/status (1.02s)

TestErrorSpam/pause (1.81s)

=== RUN   TestErrorSpam/pause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 pause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 pause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 pause
--- PASS: TestErrorSpam/pause (1.81s)

TestErrorSpam/unpause (1.85s)

=== RUN   TestErrorSpam/unpause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 unpause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 unpause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 unpause
--- PASS: TestErrorSpam/unpause (1.85s)

TestErrorSpam/stop (1.46s)

=== RUN   TestErrorSpam/stop
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 stop
error_spam_test.go:159: (dbg) Done: out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 stop: (1.256093013s)
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 stop
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-329014 --log_dir /tmp/nospam-329014 stop
--- PASS: TestErrorSpam/stop (1.46s)

TestFunctional/serial/CopySyncFile (0s)

=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1855: local sync path: /home/jenkins/minikube-integration/19651-1378450/.minikube/files/etc/test/nested/copy/1383833/hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.00s)

TestFunctional/serial/StartWithProxy (76.44s)

=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:2234: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=crio
functional_test.go:2234: (dbg) Done: out/minikube-linux-arm64 start -p functional-919910 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=crio: (1m16.435732817s)
--- PASS: TestFunctional/serial/StartWithProxy (76.44s)

TestFunctional/serial/AuditLog (0s)

=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

TestFunctional/serial/SoftStart (29.09s)

=== RUN   TestFunctional/serial/SoftStart
functional_test.go:659: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --alsologtostderr -v=8
functional_test.go:659: (dbg) Done: out/minikube-linux-arm64 start -p functional-919910 --alsologtostderr -v=8: (29.090452358s)
functional_test.go:663: soft start took 29.091699843s for "functional-919910" cluster.
--- PASS: TestFunctional/serial/SoftStart (29.09s)

TestFunctional/serial/CacheCmd/cache/add_remote (4.33s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:3.1
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:3.1: (1.448594219s)
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:3.3
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:3.3: (1.459273111s)
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:latest
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 cache add registry.k8s.io/pause:latest: (1.422148278s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (4.33s)

TestFunctional/serial/CacheCmd/cache/add_local (1.52s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1077: (dbg) Run:  docker build -t minikube-local-cache-test:functional-919910 /tmp/TestFunctionalserialCacheCmdcacheadd_local2483047006/001
functional_test.go:1089: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache add minikube-local-cache-test:functional-919910
functional_test.go:1089: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 cache add minikube-local-cache-test:functional-919910: (1.001469662s)
functional_test.go:1094: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache delete minikube-local-cache-test:functional-919910
functional_test.go:1083: (dbg) Run:  docker rmi minikube-local-cache-test:functional-919910
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (1.52s)

TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)

=== RUN   TestFunctional/serial/CacheCmd/cache/CacheDelete
functional_test.go:1102: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)

TestFunctional/serial/CacheCmd/cache/list (0.05s)

=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1110: (dbg) Run:  out/minikube-linux-arm64 cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.05s)

TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.32s)

=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1124: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh sudo crictl images
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.32s)

TestFunctional/serial/CacheCmd/cache/cache_reload (2.17s)

=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1147: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh sudo crictl rmi registry.k8s.io/pause:latest
functional_test.go:1153: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1153: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh sudo crictl inspecti registry.k8s.io/pause:latest: exit status 1 (297.007632ms)

-- stdout --
	FATA[0000] no such image "registry.k8s.io/pause:latest" present 

-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test.go:1158: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cache reload
functional_test.go:1158: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 cache reload: (1.227453808s)
functional_test.go:1163: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh sudo crictl inspecti registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (2.17s)

TestFunctional/serial/CacheCmd/cache/delete (0.13s)

=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1172: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.1
functional_test.go:1172: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.13s)

TestFunctional/serial/MinikubeKubectlCmd (0.15s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:716: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 kubectl -- --context functional-919910 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.15s)

TestFunctional/serial/MinikubeKubectlCmdDirectly (0.14s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:741: (dbg) Run:  out/kubectl --context functional-919910 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (0.14s)

TestFunctional/serial/ExtraConfig (35.7s)

=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:757: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
E0916 10:48:53.342604 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.349976 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.361410 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.382824 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.424522 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.505940 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.667321 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:53.988981 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:54.631201 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:55.912900 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:58.474263 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:49:03.596569 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:757: (dbg) Done: out/minikube-linux-arm64 start -p functional-919910 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: (35.696261015s)
functional_test.go:761: restart took 35.696367318s for "functional-919910" cluster.
--- PASS: TestFunctional/serial/ExtraConfig (35.70s)

TestFunctional/serial/LogsCmd (1.74s)

=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1236: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs
functional_test.go:1236: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs: (1.744424148s)
--- PASS: TestFunctional/serial/LogsCmd (1.74s)

TestFunctional/serial/LogsFileCmd (1.83s)

=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1250: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 logs --file /tmp/TestFunctionalserialLogsFileCmd2109493987/001/logs.txt
E0916 10:49:13.838653 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:1250: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 logs --file /tmp/TestFunctionalserialLogsFileCmd2109493987/001/logs.txt: (1.826019802s)
--- PASS: TestFunctional/serial/LogsFileCmd (1.83s)

TestFunctional/parallel/ConfigCmd (0.46s)

=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd

=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config unset cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config get cpus
functional_test.go:1199: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 config get cpus: exit status 14 (71.126077ms)

** stderr ** 
	Error: specified key could not be found in config

** /stderr **
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config set cpus 2
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config get cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config unset cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 config get cpus
functional_test.go:1199: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 config get cpus: exit status 14 (75.754652ms)

** stderr ** 
	Error: specified key could not be found in config

** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (0.46s)
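
Note on the two non-zero exits above: "config get" on a key that is unset returns exit status 14 with "Error: specified key could not be found in config" rather than printing an empty value, and that is what the unset/get pairs assert. A minimal Go sketch of the same check outside the harness (binary path and profile name taken from this run; this is not minikube's actual test code):

package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	// Same invocation the test makes right after "config unset cpus".
	cmd := exec.Command("out/minikube-linux-arm64", "-p", "functional-919910", "config", "get", "cpus")
	out, err := cmd.CombinedOutput()
	var ee *exec.ExitError
	if errors.As(err, &ee) && ee.ExitCode() == 14 {
		// Matches the stderr captured above.
		fmt.Printf("unset key, as expected: %s", out)
	}
}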

TestFunctional/parallel/DryRun (0.66s)

=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun

=== CONT  TestFunctional/parallel/DryRun
functional_test.go:974: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=crio
functional_test.go:974: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-919910 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=crio: exit status 23 (398.080483ms)

-- stdout --
	* [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on existing profile
	
	

-- /stdout --
** stderr ** 
	I0916 10:51:01.755963 1410974 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:01.758661 1410974 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:01.758719 1410974 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:01.758739 1410974 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:01.759052 1410974 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:01.759667 1410974 out.go:352] Setting JSON to false
	I0916 10:51:01.760692 1410974 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38007,"bootTime":1726445855,"procs":182,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:01.763478 1410974 start.go:139] virtualization:  
	I0916 10:51:01.768531 1410974 out.go:177] * [functional-919910] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:01.771335 1410974 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:01.771607 1410974 notify.go:220] Checking for updates...
	I0916 10:51:01.776588 1410974 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:01.779121 1410974 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:01.781739 1410974 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:01.784406 1410974 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:01.787102 1410974 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:01.790397 1410974 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:01.790969 1410974 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:01.888183 1410974 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:01.888354 1410974 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:01.998496 1410974 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:01.985232851 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:01.998703 1410974 docker.go:318] overlay module found
	I0916 10:51:02.001608 1410974 out.go:177] * Using the docker driver based on existing profile
	I0916 10:51:02.006290 1410974 start.go:297] selected driver: docker
	I0916 10:51:02.006318 1410974 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:02.006444 1410974 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:02.010487 1410974 out.go:201] 
	W0916 10:51:02.013049 1410974 out.go:270] X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	I0916 10:51:02.015666 1410974 out.go:201] 

** /stderr **
functional_test.go:991: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --dry-run --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
--- PASS: TestFunctional/parallel/DryRun (0.66s)

TestFunctional/parallel/InternationalLanguage (0.24s)

=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage

=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:1020: (dbg) Run:  out/minikube-linux-arm64 start -p functional-919910 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=crio
functional_test.go:1020: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-919910 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=crio: exit status 23 (239.531174ms)

-- stdout --
	* [functional-919910] minikube v1.34.0 sur Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Utilisation du pilote docker basé sur le profil existant
	
	

-- /stdout --
** stderr ** 
	I0916 10:51:01.447834 1410913 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:01.448286 1410913 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:01.448318 1410913 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:01.448359 1410913 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:01.449048 1410913 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:51:01.449670 1410913 out.go:352] Setting JSON to false
	I0916 10:51:01.455166 1410913 start.go:129] hostinfo: {"hostname":"ip-172-31-21-244","uptime":38007,"bootTime":1726445855,"procs":180,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"da8ac1fd-6236-412a-a346-95873c98230d"}
	I0916 10:51:01.455326 1410913 start.go:139] virtualization:  
	I0916 10:51:01.462317 1410913 out.go:177] * [functional-919910] minikube v1.34.0 sur Ubuntu 20.04 (arm64)
	I0916 10:51:01.464350 1410913 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:01.464528 1410913 notify.go:220] Checking for updates...
	I0916 10:51:01.468194 1410913 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:01.470277 1410913 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	I0916 10:51:01.472940 1410913 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	I0916 10:51:01.476843 1410913 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:01.482046 1410913 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:01.485252 1410913 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:51:01.485835 1410913 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:01.528747 1410913 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:01.528913 1410913 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:01.605751 1410913 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:32 OomKillDisable:true NGoroutines:51 SystemTime:2024-09-16 10:51:01.59316819 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:01.605867 1410913 docker.go:318] overlay module found
	I0916 10:51:01.608975 1410913 out.go:177] * Utilisation du pilote docker basé sur le profil existant
	I0916 10:51:01.611575 1410913 start.go:297] selected driver: docker
	I0916 10:51:01.611596 1410913 start.go:901] validating driver "docker" against &{Name:functional-919910 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-919910 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:01.611700 1410913 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:01.615080 1410913 out.go:201] 
	W0916 10:51:01.617729 1410913 out.go:270] X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	I0916 10:51:01.622564 1410913 out.go:201] 

** /stderr **
--- PASS: TestFunctional/parallel/InternationalLanguage (0.24s)
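
The French messages above are locale-driven: this is the same dry run DryRun exercised, failing with the same exit status 23 (RSRC_INSUFFICIENT_REQ_MEMORY); only the output language changes. A sketch of reproducing that locally, assuming minikube picks the language up from a standard locale variable such as LC_ALL (the log does not show which variable the test actually sets):

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	cmd := exec.Command("out/minikube-linux-arm64", "start", "-p", "functional-919910",
		"--dry-run", "--memory", "250MB", "--alsologtostderr", "--driver=docker", "--container-runtime=crio")
	// Assumption: a French locale in the environment selects the translated messages.
	cmd.Env = append(os.Environ(), "LC_ALL=fr_FR.UTF-8")
	out, err := cmd.CombinedOutput()
	// Expect exit status 23 and "Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY ...".
	fmt.Println(string(out), err)
}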

TestFunctional/parallel/StatusCmd (1.63s)

=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd

=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:854: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 status
functional_test.go:860: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:872: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 status -o json
--- PASS: TestFunctional/parallel/StatusCmd (1.63s)
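
The three invocations cover the default output, a custom Go template (the labels before the colons, including the misspelled "kublet", are free-form; the template fields are .Host, .Kubelet, .APIServer, .Kubeconfig), and JSON. A sketch that consumes the JSON form; the struct mirrors those template fields and is an assumption about the full schema:

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// Fields mirror the template keys used above; the full JSON schema is assumed.
type minikubeStatus struct {
	Host       string
	Kubelet    string
	APIServer  string
	Kubeconfig string
}

func main() {
	out, err := exec.Command("out/minikube-linux-arm64", "-p", "functional-919910",
		"status", "-o", "json").Output()
	if err != nil {
		panic(err)
	}
	var st minikubeStatus
	if err := json.Unmarshal(out, &st); err != nil {
		panic(err)
	}
	fmt.Printf("host=%s kubelet=%s apiserver=%s kubeconfig=%s\n",
		st.Host, st.Kubelet, st.APIServer, st.Kubeconfig)
}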

TestFunctional/parallel/AddonsCmd (0.14s)

=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd

=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1690: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 addons list
functional_test.go:1702: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.14s)

TestFunctional/parallel/SSHCmd (0.73s)

=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd

=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1725: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "echo hello"
functional_test.go:1742: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "cat /etc/hostname"
--- PASS: TestFunctional/parallel/SSHCmd (0.73s)

TestFunctional/parallel/CpCmd (2.24s)

=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd

=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cp testdata/cp-test.txt /home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh -n functional-919910 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cp functional-919910:/home/docker/cp-test.txt /tmp/TestFunctionalparallelCpCmd2247858640/001/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh -n functional-919910 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 cp testdata/cp-test.txt /tmp/does/not/exist/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh -n functional-919910 "sudo cat /tmp/does/not/exist/cp-test.txt"
--- PASS: TestFunctional/parallel/CpCmd (2.24s)

TestFunctional/parallel/FileSync (0.36s)

=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync

=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1929: Checking for existence of /etc/test/nested/copy/1383833/hosts within VM
functional_test.go:1931: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /etc/test/nested/copy/1383833/hosts"
functional_test.go:1936: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (0.36s)

TestFunctional/parallel/CertSync (2.4s)

=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync

=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1972: Checking for existence of /etc/ssl/certs/1383833.pem within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /etc/ssl/certs/1383833.pem"
functional_test.go:1972: Checking for existence of /usr/share/ca-certificates/1383833.pem within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /usr/share/ca-certificates/1383833.pem"
functional_test.go:1972: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:1999: Checking for existence of /etc/ssl/certs/13838332.pem within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /etc/ssl/certs/13838332.pem"
functional_test.go:1999: Checking for existence of /usr/share/ca-certificates/13838332.pem within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /usr/share/ca-certificates/13838332.pem"
functional_test.go:1999: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
--- PASS: TestFunctional/parallel/CertSync (2.40s)

TestFunctional/parallel/NonActiveRuntimeDisabled (0.79s)

=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled

=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:2027: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo systemctl is-active docker"
functional_test.go:2027: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "sudo systemctl is-active docker": exit status 1 (373.222533ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
functional_test.go:2027: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo systemctl is-active containerd"
functional_test.go:2027: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "sudo systemctl is-active containerd": exit status 1 (416.574143ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (0.79s)
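
Both probes pass because they fail: with cri-o as the configured runtime, "systemctl is-active" for docker and containerd prints "inactive" and exits non-zero. "ssh: Process exited with status 3" is systemd's usual is-active code for an inactive unit, which "minikube ssh" surfaces as exit status 1. A sketch of the same assertion against this profile (binary path and profile name taken from this run):

package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	for _, unit := range []string{"docker", "containerd"} {
		cmd := exec.Command("out/minikube-linux-arm64", "-p", "functional-919910",
			"ssh", "sudo systemctl is-active "+unit)
		out, err := cmd.CombinedOutput()
		var ee *exec.ExitError
		if errors.As(err, &ee) {
			// Expected here: "inactive" plus a non-zero exit, since cri-o is the runtime.
			fmt.Printf("%s: %q exit=%d\n", unit, out, ee.ExitCode())
		}
	}
}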

TestFunctional/parallel/License (0.25s)

=== RUN   TestFunctional/parallel/License
=== PAUSE TestFunctional/parallel/License

=== CONT  TestFunctional/parallel/License
functional_test.go:2288: (dbg) Run:  out/minikube-linux-arm64 license
--- PASS: TestFunctional/parallel/License (0.25s)

TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.68s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr]
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr]
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr] ...
helpers_test.go:508: unable to kill pid 1407975: os: process already finished
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.68s)

TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:129: (dbg) daemon: [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:434: (dbg) stopping [out/minikube-linux-arm64 -p functional-919910 tunnel --alsologtostderr] ...
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

TestFunctional/parallel/ProfileCmd/profile_not_create (0.55s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1270: (dbg) Run:  out/minikube-linux-arm64 profile lis
functional_test.go:1275: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (0.55s)

TestFunctional/parallel/ProfileCmd/profile_list (0.48s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1310: (dbg) Run:  out/minikube-linux-arm64 profile list
functional_test.go:1315: Took "414.235139ms" to run "out/minikube-linux-arm64 profile list"
functional_test.go:1324: (dbg) Run:  out/minikube-linux-arm64 profile list -l
functional_test.go:1329: Took "65.192867ms" to run "out/minikube-linux-arm64 profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (0.48s)

TestFunctional/parallel/ProfileCmd/profile_json_output (0.48s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1361: (dbg) Run:  out/minikube-linux-arm64 profile list -o json
functional_test.go:1366: Took "404.693959ms" to run "out/minikube-linux-arm64 profile list -o json"
functional_test.go:1374: (dbg) Run:  out/minikube-linux-arm64 profile list -o json --light
functional_test.go:1379: Took "75.099602ms" to run "out/minikube-linux-arm64 profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (0.48s)

TestFunctional/parallel/MountCmd/specific-port (2.02s)

=== RUN   TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:213: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdspecific-port740621385/001:/mount-9p --alsologtostderr -v=1 --port 46464]
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:243: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (476.636925ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:257: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh -- ls -la /mount-9p
functional_test_mount_test.go:261: guest mount directory contents
total 0
functional_test_mount_test.go:263: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdspecific-port740621385/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
functional_test_mount_test.go:264: reading mount text
functional_test_mount_test.go:278: done reading mount text
functional_test_mount_test.go:230: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:230: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "sudo umount -f /mount-9p": exit status 1 (345.160034ms)

-- stdout --
	umount: /mount-9p: not mounted.

-- /stdout --
** stderr ** 
	ssh: Process exited with status 32

** /stderr **
functional_test_mount_test.go:232: "out/minikube-linux-arm64 -p functional-919910 ssh \"sudo umount -f /mount-9p\"": exit status 1
functional_test_mount_test.go:234: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdspecific-port740621385/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
--- PASS: TestFunctional/parallel/MountCmd/specific-port (2.02s)

TestFunctional/parallel/MountCmd/VerifyCleanup (3.09s)

=== RUN   TestFunctional/parallel/MountCmd/VerifyCleanup
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount1 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount2 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount3 --alsologtostderr -v=1]
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T" /mount1: exit status 1 (886.762862ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T" /mount2
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh "findmnt -T" /mount3
functional_test_mount_test.go:370: (dbg) Run:  out/minikube-linux-arm64 mount -p functional-919910 --kill=true
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount1 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount2 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-919910 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3682094802/001:/mount3 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/MountCmd/VerifyCleanup (3.09s)

TestFunctional/parallel/Version/short (0.08s)

=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short

=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2256: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 version --short
--- PASS: TestFunctional/parallel/Version/short (0.08s)

TestFunctional/parallel/Version/components (1.01s)

=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components

=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2270: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 version -o=json --components
functional_test.go:2270: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 version -o=json --components: (1.014423244s)
--- PASS: TestFunctional/parallel/Version/components (1.01s)

TestFunctional/parallel/ImageCommands/ImageListShort (0.28s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListShort
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListShort

=== CONT  TestFunctional/parallel/ImageCommands/ImageListShort
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls --format short --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-919910 image ls --format short --alsologtostderr:
registry.k8s.io/pause:latest
registry.k8s.io/pause:3.3
registry.k8s.io/pause:3.10
registry.k8s.io/pause:3.1
registry.k8s.io/kube-scheduler:v1.31.1
registry.k8s.io/kube-proxy:v1.31.1
registry.k8s.io/kube-controller-manager:v1.31.1
registry.k8s.io/kube-apiserver:v1.31.1
registry.k8s.io/etcd:3.5.15-0
registry.k8s.io/coredns/coredns:v1.11.3
localhost/minikube-local-cache-test:functional-919910
localhost/kicbase/echo-server:functional-919910
gcr.io/k8s-minikube/storage-provisioner:v5
docker.io/kindest/kindnetd:v20240813-c6f155d6
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-919910 image ls --format short --alsologtostderr:
I0916 10:51:18.924195 1414349 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:18.924459 1414349 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:18.924486 1414349 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:18.924516 1414349 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:18.924956 1414349 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:18.925767 1414349 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:18.925963 1414349 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:18.926542 1414349 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:18.950125 1414349 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:18.950191 1414349 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:18.978419 1414349 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:19.074597 1414349 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListShort (0.28s)

TestFunctional/parallel/ImageCommands/ImageListTable (0.26s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListTable
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListTable

=== CONT  TestFunctional/parallel/ImageCommands/ImageListTable
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls --format table --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-919910 image ls --format table --alsologtostderr:
|-----------------------------------------|--------------------|---------------|--------|
|                  Image                  |        Tag         |   Image ID    |  Size  |
|-----------------------------------------|--------------------|---------------|--------|
| registry.k8s.io/etcd                    | 3.5.15-0           | 27e3830e14027 | 140MB  |
| registry.k8s.io/kube-proxy              | v1.31.1            | 24a140c548c07 | 96MB   |
| localhost/minikube-local-cache-test     | functional-919910  | 8e152e739b383 | 3.33kB |
| registry.k8s.io/coredns/coredns         | v1.11.3            | 2f6c962e7b831 | 61.6MB |
| registry.k8s.io/pause                   | 3.3                | 3d18732f8686c | 487kB  |
| registry.k8s.io/pause                   | 3.1                | 8057e0500773a | 529kB  |
| registry.k8s.io/pause                   | 3.10               | afb61768ce381 | 520kB  |
| registry.k8s.io/kube-controller-manager | v1.31.1            | 279f381cb3736 | 86.9MB |
| gcr.io/k8s-minikube/storage-provisioner | v5                 | ba04bb24b9575 | 29MB   |
| localhost/kicbase/echo-server           | functional-919910  | ce2d2cda2d858 | 4.79MB |
| registry.k8s.io/kube-apiserver          | v1.31.1            | d3f53a98c0a9d | 92.6MB |
| registry.k8s.io/kube-scheduler          | v1.31.1            | 7f8aa378bb47d | 67MB   |
| registry.k8s.io/pause                   | latest             | 8cb2091f603e7 | 246kB  |
| docker.io/kindest/kindnetd              | v20240813-c6f155d6 | 6a23fa8fd2b78 | 90.3MB |
|-----------------------------------------|--------------------|---------------|--------|
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-919910 image ls --format table --alsologtostderr:
I0916 10:51:19.483712 1414501 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:19.483946 1414501 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.483975 1414501 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:19.483993 1414501 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.484379 1414501 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:19.485446 1414501 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.485649 1414501 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.486483 1414501 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:19.511825 1414501 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:19.511881 1414501 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:19.532383 1414501 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:19.633692 1414501 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListTable (0.26s)

TestFunctional/parallel/ImageCommands/ImageListJson (0.27s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListJson
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListJson

=== CONT  TestFunctional/parallel/ImageCommands/ImageListJson
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls --format json --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-919910 image ls --format json --alsologtostderr:
[{"id":"a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a","repoDigests":["docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c","docker.io/kubernetesui/metrics-scraper@sha256:853c43f3cced687cb211708aa0024304a5adb33ec45ebf5915d318358822e09a"],"repoTags":[],"size":"42263767"},{"id":"8e152e739b383b97b87b708477cca4aa0f0eb2ce1c48e472d5734f1fc65df473","repoDigests":["localhost/minikube-local-cache-test@sha256:401677c828f3aef369ef74e76887170cbd236f264644c7e0746aa6e967a95c6e"],"repoTags":["localhost/minikube-local-cache-test:functional-919910"],"size":"3330"},{"id":"24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","repoDigests":["registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44","registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9"],"repoTags":["registry.k8s.io/kube-proxy:v1.31.1"],"size":"95951255"},{"id":"ce2d2cda2d858fdae
a84129deb86d18e5dbf1c548f230b79fdca74cc91729d17","repoDigests":["localhost/kicbase/echo-server@sha256:49260110d6ce1914d3de292ed370ee11a2e34ab577b97e6011d795cb13534d4a"],"repoTags":["localhost/kicbase/echo-server:functional-919910"],"size":"4788229"},{"id":"2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","repoDigests":["registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"],"repoTags":["registry.k8s.io/coredns/coredns:v1.11.3"],"size":"61647114"},{"id":"279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","repoDigests":["registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1","registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849"],"repoTags":["registry.k8s.io/kube-controller-manager:v1.31.1"],"size":"86930758"},
{"id":"afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8","repoDigests":["registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"],"repoTags":["registry.k8s.io/pause:3.10"],"size":"519877"},{"id":"3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300","repoDigests":["registry.k8s.io/pause@sha256:e59730b14890252c14f85976e22ab1c47ec28b111ffed407f34bca1b44447476"],"repoTags":["registry.k8s.io/pause:3.3"],"size":"487479"},{"id":"8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a","repoDigests":["registry.k8s.io/pause@sha256:f5e31d44aa14d5669e030380b656463a7e45934c03994e72e3dbf83d4a645cca"],"repoTags":["registry.k8s.io/pause:latest"],"size":"246070"},{"id":"6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51","repoDigests":["docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64","docke
r.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"],"repoTags":["docker.io/kindest/kindnetd:v20240813-c6f155d6"],"size":"90295858"},{"id":"20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8","repoDigests":["docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93","docker.io/kubernetesui/dashboard@sha256:5c52c60663b473628bd98e4ffee7a747ef1f88d8c7bcee957b089fb3f61bdedf"],"repoTags":[],"size":"247562353"},{"id":"ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","repoDigests":["gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"],"repoTags":["gcr.io/k8s-minikube/storage-provisioner:v5"],"size":"29037500"},{"id":"d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","repoDigests":["registry.k8s.io/kube-apiserv
er@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb","registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef"],"repoTags":["registry.k8s.io/kube-apiserver:v1.31.1"],"size":"92632544"},{"id":"7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","repoDigests":["registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690","registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"],"repoTags":["registry.k8s.io/kube-scheduler:v1.31.1"],"size":"67007814"},{"id":"27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","repoDigests":["registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a","registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da"],"repoTags":["registry.k8s.io/etcd:3.5.15-0"],"size":"139912446"},{"id":"8057e0500773a37cde2cff041eb13ebd68c748419a2fbf
d1dfb5bf38696cc8e5","repoDigests":["registry.k8s.io/pause@sha256:b0602c9f938379133ff8017007894b48c1112681c9468f82a1e4cbf8a4498b67"],"repoTags":["registry.k8s.io/pause:3.1"],"size":"528622"}]
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-919910 image ls --format json --alsologtostderr:
I0916 10:51:19.225360 1414416 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:19.225673 1414416 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.225701 1414416 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:19.225724 1414416 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.225995 1414416 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:19.226931 1414416 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.227172 1414416 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.227787 1414416 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:19.249573 1414416 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:19.249636 1414416 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:19.270360 1414416 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:19.372670 1414416 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListJson (0.27s)
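
The record shape in the JSON dump above is visible in the log itself: each element carries id, repoDigests, repoTags, and a string-valued size. A small Go sketch that decodes that output, with the struct derived only from what is shown here:

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// Shape copied from the "image ls --format json" stdout above; note size is a string.
type image struct {
	ID          string   `json:"id"`
	RepoDigests []string `json:"repoDigests"`
	RepoTags    []string `json:"repoTags"`
	Size        string   `json:"size"`
}

func main() {
	out, err := exec.Command("out/minikube-linux-arm64", "-p", "functional-919910",
		"image", "ls", "--format", "json").Output()
	if err != nil {
		panic(err)
	}
	var imgs []image
	if err := json.Unmarshal(out, &imgs); err != nil {
		panic(err)
	}
	for _, im := range imgs {
		fmt.Println(im.ID[:12], im.RepoTags, im.Size)
	}
}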

TestFunctional/parallel/ImageCommands/ImageListYaml (0.3s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListYaml
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListYaml

=== CONT  TestFunctional/parallel/ImageCommands/ImageListYaml
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls --format yaml --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-919910 image ls --format yaml --alsologtostderr:
- id: 20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8
repoDigests:
- docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93
- docker.io/kubernetesui/dashboard@sha256:5c52c60663b473628bd98e4ffee7a747ef1f88d8c7bcee957b089fb3f61bdedf
repoTags: []
size: "247562353"
- id: ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17
repoDigests:
- localhost/kicbase/echo-server@sha256:49260110d6ce1914d3de292ed370ee11a2e34ab577b97e6011d795cb13534d4a
repoTags:
- localhost/kicbase/echo-server:functional-919910
size: "4788229"
- id: 8e152e739b383b97b87b708477cca4aa0f0eb2ce1c48e472d5734f1fc65df473
repoDigests:
- localhost/minikube-local-cache-test@sha256:401677c828f3aef369ef74e76887170cbd236f264644c7e0746aa6e967a95c6e
repoTags:
- localhost/minikube-local-cache-test:functional-919910
size: "3330"
- id: a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a
repoDigests:
- docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c
- docker.io/kubernetesui/metrics-scraper@sha256:853c43f3cced687cb211708aa0024304a5adb33ec45ebf5915d318358822e09a
repoTags: []
size: "42263767"
- id: 27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da
repoDigests:
- registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a
- registry.k8s.io/etcd@sha256:e3ee3ca2dbaf511385000dbd54123629c71b6cfaabd469e658d76a116b7f43da
repoTags:
- registry.k8s.io/etcd:3.5.15-0
size: "139912446"
- id: 279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e
repoDigests:
- registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1
- registry.k8s.io/kube-controller-manager@sha256:a9a0505b7d0caca0edd18e37bacc9425b2c8824546b26f5b286e8cb144669849
repoTags:
- registry.k8s.io/kube-controller-manager:v1.31.1
size: "86930758"
- id: 8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5
repoDigests:
- registry.k8s.io/pause@sha256:b0602c9f938379133ff8017007894b48c1112681c9468f82a1e4cbf8a4498b67
repoTags:
- registry.k8s.io/pause:3.1
size: "528622"
- id: afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8
repoDigests:
- registry.k8s.io/pause@sha256:e50b7059b633caf3c1449b8da680d11845cda4506b513ee7a2de00725f0a34a7
- registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a
repoTags:
- registry.k8s.io/pause:3.10
size: "519877"
- id: 3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300
repoDigests:
- registry.k8s.io/pause@sha256:e59730b14890252c14f85976e22ab1c47ec28b111ffed407f34bca1b44447476
repoTags:
- registry.k8s.io/pause:3.3
size: "487479"
- id: 8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a
repoDigests:
- registry.k8s.io/pause@sha256:f5e31d44aa14d5669e030380b656463a7e45934c03994e72e3dbf83d4a645cca
repoTags:
- registry.k8s.io/pause:latest
size: "246070"
- id: ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6
repoDigests:
- gcr.io/k8s-minikube/storage-provisioner@sha256:0ba370588274b88531ab311a5d2e645d240a853555c1e58fd1dd428fc333c9d2
- gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944
repoTags:
- gcr.io/k8s-minikube/storage-provisioner:v5
size: "29037500"
- id: 2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4
repoDigests:
- registry.k8s.io/coredns/coredns@sha256:31440a2bef59e2f1ffb600113b557103740ff851e27b0aef5b849f6e3ab994a6
- registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e
repoTags:
- registry.k8s.io/coredns/coredns:v1.11.3
size: "61647114"
- id: d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853
repoDigests:
- registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb
- registry.k8s.io/kube-apiserver@sha256:e3a40e6c6e99ba4a4d72432b3eda702099a2926e49d4afeb6138f2d95e6371ef
repoTags:
- registry.k8s.io/kube-apiserver:v1.31.1
size: "92632544"
- id: 24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d
repoDigests:
- registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44
- registry.k8s.io/kube-proxy@sha256:7b3bf9f1e260ccb1fd543570e1e9869a373f716fb050cd23a6a2771aa4e06ae9
repoTags:
- registry.k8s.io/kube-proxy:v1.31.1
size: "95951255"
- id: 7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d
repoDigests:
- registry.k8s.io/kube-scheduler@sha256:65212209347a96b08a97e679b98dca46885f09cf3a53e8d13b28d2c083a5b690
- registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0
repoTags:
- registry.k8s.io/kube-scheduler:v1.31.1
size: "67007814"
- id: 6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51
repoDigests:
- docker.io/kindest/kindnetd@sha256:4d39335073da9a0b82be8e01028f0aa75aff16caff2e2d8889d0effd579a6f64
- docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166
repoTags:
- docker.io/kindest/kindnetd:v20240813-c6f155d6
size: "90295858"

                                                
                                                
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-919910 image ls --format yaml --alsologtostderr:
I0916 10:51:18.920196 1414350 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:18.920352 1414350 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:18.920364 1414350 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:18.920370 1414350 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:18.920645 1414350 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:18.921483 1414350 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:18.921640 1414350 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:18.922181 1414350 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:18.944047 1414350 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:18.944111 1414350 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:18.967012 1414350 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:19.069718 1414350 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListYaml (0.30s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageBuild (3.74s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageBuild
=== PAUSE TestFunctional/parallel/ImageCommands/ImageBuild

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageBuild
functional_test.go:308: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 ssh pgrep buildkitd
functional_test.go:308: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-919910 ssh pgrep buildkitd: exit status 1 (333.784385ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test.go:315: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image build -t localhost/my-image:functional-919910 testdata/build --alsologtostderr
functional_test.go:315: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 image build -t localhost/my-image:functional-919910 testdata/build --alsologtostderr: (3.180956592s)
functional_test.go:320: (dbg) Stdout: out/minikube-linux-arm64 -p functional-919910 image build -t localhost/my-image:functional-919910 testdata/build --alsologtostderr:
STEP 1/3: FROM gcr.io/k8s-minikube/busybox
STEP 2/3: RUN true
--> bbb2e9bc78e
STEP 3/3: ADD content.txt /
COMMIT localhost/my-image:functional-919910
--> 83c9e8af553
Successfully tagged localhost/my-image:functional-919910
83c9e8af5531982233e18a9e25ed42f871cc3445de716d6050ae84dce0f7dcdf
functional_test.go:323: (dbg) Stderr: out/minikube-linux-arm64 -p functional-919910 image build -t localhost/my-image:functional-919910 testdata/build --alsologtostderr:
I0916 10:51:19.545798 1414507 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:19.547305 1414507 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.547320 1414507 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:19.547326 1414507 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:19.547592 1414507 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
I0916 10:51:19.548245 1414507 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.548893 1414507 config.go:182] Loaded profile config "functional-919910": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
I0916 10:51:19.549396 1414507 cli_runner.go:164] Run: docker container inspect functional-919910 --format={{.State.Status}}
I0916 10:51:19.575249 1414507 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:19.575308 1414507 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-919910
I0916 10:51:19.595570 1414507 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34613 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/functional-919910/id_rsa Username:docker}
I0916 10:51:19.697080 1414507 build_images.go:161] Building image from path: /tmp/build.3091278659.tar
I0916 10:51:19.697196 1414507 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build
I0916 10:51:19.706798 1414507 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/build/build.3091278659.tar
I0916 10:51:19.710403 1414507 ssh_runner.go:352] existence check for /var/lib/minikube/build/build.3091278659.tar: stat -c "%s %y" /var/lib/minikube/build/build.3091278659.tar: Process exited with status 1
stdout:

                                                
                                                
stderr:
stat: cannot statx '/var/lib/minikube/build/build.3091278659.tar': No such file or directory
I0916 10:51:19.710444 1414507 ssh_runner.go:362] scp /tmp/build.3091278659.tar --> /var/lib/minikube/build/build.3091278659.tar (3072 bytes)
I0916 10:51:19.736256 1414507 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build/build.3091278659
I0916 10:51:19.747100 1414507 ssh_runner.go:195] Run: sudo tar -C /var/lib/minikube/build/build.3091278659 -xf /var/lib/minikube/build/build.3091278659.tar
I0916 10:51:19.757113 1414507 crio.go:315] Building image: /var/lib/minikube/build/build.3091278659
I0916 10:51:19.757193 1414507 ssh_runner.go:195] Run: sudo podman build -t localhost/my-image:functional-919910 /var/lib/minikube/build/build.3091278659 --cgroup-manager=cgroupfs
Trying to pull gcr.io/k8s-minikube/busybox:latest...
Getting image source signatures
Copying blob sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34
Copying blob sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34
Copying config sha256:71a676dd070f4b701c3272e566d84951362f1326ea07d5bbad119d1c4f6b3d02
Writing manifest to image destination
Storing signatures
I0916 10:51:22.618135 1414507 ssh_runner.go:235] Completed: sudo podman build -t localhost/my-image:functional-919910 /var/lib/minikube/build/build.3091278659 --cgroup-manager=cgroupfs: (2.860919115s)
I0916 10:51:22.618218 1414507 ssh_runner.go:195] Run: sudo rm -rf /var/lib/minikube/build/build.3091278659
I0916 10:51:22.627246 1414507 ssh_runner.go:195] Run: sudo rm -f /var/lib/minikube/build/build.3091278659.tar
I0916 10:51:22.636065 1414507 build_images.go:217] Built localhost/my-image:functional-919910 from /tmp/build.3091278659.tar
I0916 10:51:22.636101 1414507 build_images.go:133] succeeded building to: functional-919910
I0916 10:51:22.636107 1414507 build_images.go:134] failed building to: 
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageBuild (3.74s)
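
Editor's note: the stderr above documents the CRI-O build path end to end. Because `ssh pgrep buildkitd` exited 1, minikube tars the build context, copies it to /var/lib/minikube/build/ over SSH, extracts it, and shells out to `sudo podman build ... --cgroup-manager=cgroupfs` inside the node. The STEP 1/3..3/3 lines also reveal the shape of the testdata/build context. A sketch that replays the same build against the same profile; the Dockerfile is reconstructed from those STEP lines, and the temp directory and content.txt payload are hypothetical stand-ins:

package main

import (
	"os"
	"os/exec"
	"path/filepath"
)

func main() {
	dir, err := os.MkdirTemp("", "build")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)
	// Dockerfile reconstructed from the STEP 1/3..3/3 lines above.
	os.WriteFile(filepath.Join(dir, "Dockerfile"), []byte(
		"FROM gcr.io/k8s-minikube/busybox\nRUN true\nADD content.txt /\n"), 0o644)
	os.WriteFile(filepath.Join(dir, "content.txt"), []byte("hello\n"), 0o644) // hypothetical payload

	// Same invocation as functional_test.go:315, pointed at our temp context.
	cmd := exec.Command("out/minikube-linux-arm64", "-p", "functional-919910",
		"image", "build", "-t", "localhost/my-image:functional-919910", dir,
		"--alsologtostderr")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	if err := cmd.Run(); err != nil {
		panic(err)
	}
}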

                                                
                                    
TestFunctional/parallel/ImageCommands/Setup (0.69s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/Setup
functional_test.go:342: (dbg) Run:  docker pull kicbase/echo-server:1.0
functional_test.go:347: (dbg) Run:  docker tag kicbase/echo-server:1.0 kicbase/echo-server:functional-919910
--- PASS: TestFunctional/parallel/ImageCommands/Setup (0.69s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageLoadDaemon (4.7s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadDaemon
functional_test.go:355: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image load --daemon kicbase/echo-server:functional-919910 --alsologtostderr
functional_test.go:355: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 image load --daemon kicbase/echo-server:functional-919910 --alsologtostderr: (4.311149335s)
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadDaemon (4.70s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.48s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageReloadDaemon
functional_test.go:365: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image load --daemon kicbase/echo-server:functional-919910 --alsologtostderr
functional_test.go:365: (dbg) Done: out/minikube-linux-arm64 -p functional-919910 image load --daemon kicbase/echo-server:functional-919910 --alsologtostderr: (1.107666005s)
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.48s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.43s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon
functional_test.go:235: (dbg) Run:  docker pull kicbase/echo-server:latest
functional_test.go:240: (dbg) Run:  docker tag kicbase/echo-server:latest kicbase/echo-server:functional-919910
functional_test.go:245: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image load --daemon kicbase/echo-server:functional-919910 --alsologtostderr
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.43s)

                                                
                                    
TestFunctional/parallel/UpdateContextCmd/no_changes (0.21s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (0.21s)

                                                
                                    
TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.18s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.18s)

                                                
                                    
TestFunctional/parallel/UpdateContextCmd/no_clusters (0.23s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (0.23s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.58s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveToFile
functional_test.go:380: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image save kicbase/echo-server:functional-919910 /home/jenkins/workspace/Docker_Linux_crio_arm64/echo-server-save.tar --alsologtostderr
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.58s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageRemove (0.67s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageRemove
functional_test.go:392: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image rm kicbase/echo-server:functional-919910 --alsologtostderr
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageRemove (0.67s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.99s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadFromFile
functional_test.go:409: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image load /home/jenkins/workspace/Docker_Linux_crio_arm64/echo-server-save.tar --alsologtostderr
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.99s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.58s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveDaemon
functional_test.go:419: (dbg) Run:  docker rmi kicbase/echo-server:functional-919910
functional_test.go:424: (dbg) Run:  out/minikube-linux-arm64 -p functional-919910 image save --daemon kicbase/echo-server:functional-919910 --alsologtostderr
functional_test.go:432: (dbg) Run:  docker image inspect localhost/kicbase/echo-server:functional-919910
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.58s)
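
Editor's note: taken together, the tests above exercise a full round trip: save a tagged image to a tar, remove it from the runtime, load it back from the tar, and finally `image save --daemon`, which re-imports it into the host's Docker as localhost/kicbase/echo-server. A condensed sketch of the same sequence; the /tmp tar path is illustrative (the log saves into the Jenkins workspace):

package main

import (
	"fmt"
	"os/exec"
)

// run invokes the same binary and profile as the log and echoes the output.
func run(args ...string) {
	full := append([]string{"-p", "functional-919910"}, args...)
	out, err := exec.Command("out/minikube-linux-arm64", full...).CombinedOutput()
	fmt.Print(string(out))
	if err != nil {
		panic(err)
	}
}

func main() {
	tar := "/tmp/echo-server-save.tar" // illustrative path
	run("image", "save", "kicbase/echo-server:functional-919910", tar)
	run("image", "rm", "kicbase/echo-server:functional-919910")
	run("image", "load", tar)
	run("image", "ls") // the tag should be present again
}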

                                                
                                    
TestFunctional/delete_echo-server_images (0.04s)

                                                
                                                
=== RUN   TestFunctional/delete_echo-server_images
functional_test.go:190: (dbg) Run:  docker rmi -f kicbase/echo-server:1.0
functional_test.go:190: (dbg) Run:  docker rmi -f kicbase/echo-server:functional-919910
--- PASS: TestFunctional/delete_echo-server_images (0.04s)

                                                
                                    
TestFunctional/delete_my-image_image (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:198: (dbg) Run:  docker rmi -f localhost/my-image:functional-919910
--- PASS: TestFunctional/delete_my-image_image (0.02s)

                                                
                                    
TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:206: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-919910
--- PASS: TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                    
TestMultiControlPlane/serial/StartCluster (171.3s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StartCluster
ha_test.go:101: (dbg) Run:  out/minikube-linux-arm64 start -p ha-334765 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=crio
E0916 10:51:37.203854 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:53:53.342055 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:101: (dbg) Done: out/minikube-linux-arm64 start -p ha-334765 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=crio: (2m50.449919871s)
ha_test.go:107: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
--- PASS: TestMultiControlPlane/serial/StartCluster (171.30s)

                                                
                                    
TestMultiControlPlane/serial/DeployApp (10.85s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeployApp
ha_test.go:128: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- apply -f ./testdata/ha/ha-pod-dns-test.yaml
ha_test.go:133: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- rollout status deployment/busybox
E0916 10:54:17.495512 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.502092 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.513457 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.534843 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.576290 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.657866 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.819900 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:18.141754 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:18.783681 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:20.065015 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:21.045284 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:22.626388 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:133: (dbg) Done: out/minikube-linux-arm64 kubectl -p ha-334765 -- rollout status deployment/busybox: (7.860947295s)
ha_test.go:140: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- get pods -o jsonpath='{.items[*].status.podIP}'
ha_test.go:163: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mh2kc -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-tczms -- nslookup kubernetes.io
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mh2kc -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-tczms -- nslookup kubernetes.default
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mh2kc -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-tczms -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiControlPlane/serial/DeployApp (10.85s)
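
Editor's note: the assertion pattern above is worth spelling out. After the rollout, every busybox replica must resolve an external name (kubernetes.io) and the in-cluster service name at increasing levels of qualification. A sketch of the same probe loop, driven through minikube's bundled kubectl exactly as the log does; error handling is simplified for brevity:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// kubectl shells out through the bundled kubectl, as ha_test.go does.
func kubectl(args ...string) string {
	full := append([]string{"kubectl", "-p", "ha-334765", "--"}, args...)
	out, err := exec.Command("out/minikube-linux-arm64", full...).CombinedOutput()
	if err != nil {
		panic(fmt.Sprintf("%v\n%s", err, out))
	}
	return string(out)
}

func main() {
	pods := strings.Fields(kubectl("get", "pods", "-o",
		"jsonpath={.items[*].metadata.name}"))
	names := []string{"kubernetes.io", "kubernetes.default",
		"kubernetes.default.svc.cluster.local"}
	for _, pod := range pods {
		for _, name := range names {
			kubectl("exec", pod, "--", "nslookup", name)
			fmt.Printf("%s resolved %s\n", pod, name)
		}
	}
}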

                                                
                                    
TestMultiControlPlane/serial/PingHostFromPods (1.6s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/PingHostFromPods
ha_test.go:199: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- get pods -o jsonpath='{.items[*].metadata.name}'
E0916 10:54:27.748136 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mbfkp -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mh2kc -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-mh2kc -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-tczms -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-334765 -- exec busybox-7dff88458-tczms -- sh -c "ping -c 1 192.168.49.1"
--- PASS: TestMultiControlPlane/serial/PingHostFromPods (1.60s)

                                                
                                    
TestMultiControlPlane/serial/AddWorkerNode (63.12s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddWorkerNode
ha_test.go:228: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-334765 -v=7 --alsologtostderr
E0916 10:54:37.990519 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:58.472141 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:228: (dbg) Done: out/minikube-linux-arm64 node add -p ha-334765 -v=7 --alsologtostderr: (1m2.106617147s)
ha_test.go:234: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:234: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr: (1.012115639s)
--- PASS: TestMultiControlPlane/serial/AddWorkerNode (63.12s)

                                                
                                    
TestMultiControlPlane/serial/HAppyAfterClusterStart (0.78s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterClusterStart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterClusterStart (0.78s)

                                                
                                    
TestMultiControlPlane/serial/CopyFile (19.14s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/CopyFile
ha_test.go:326: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status --output json -v=7 --alsologtostderr
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp testdata/cp-test.txt ha-334765:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765:/home/docker/cp-test.txt ha-334765-m02:/home/docker/cp-test_ha-334765_ha-334765-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test_ha-334765_ha-334765-m02.txt"
E0916 10:55:39.434009 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765:/home/docker/cp-test.txt ha-334765-m03:/home/docker/cp-test_ha-334765_ha-334765-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test_ha-334765_ha-334765-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765:/home/docker/cp-test.txt ha-334765-m04:/home/docker/cp-test_ha-334765_ha-334765-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test_ha-334765_ha-334765-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp testdata/cp-test.txt ha-334765-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m02:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m02:/home/docker/cp-test.txt ha-334765:/home/docker/cp-test_ha-334765-m02_ha-334765.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test_ha-334765-m02_ha-334765.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m02:/home/docker/cp-test.txt ha-334765-m03:/home/docker/cp-test_ha-334765-m02_ha-334765-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test_ha-334765-m02_ha-334765-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m02:/home/docker/cp-test.txt ha-334765-m04:/home/docker/cp-test_ha-334765-m02_ha-334765-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test_ha-334765-m02_ha-334765-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp testdata/cp-test.txt ha-334765-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt ha-334765:/home/docker/cp-test_ha-334765-m03_ha-334765.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test_ha-334765-m03_ha-334765.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt ha-334765-m02:/home/docker/cp-test_ha-334765-m03_ha-334765-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test_ha-334765-m03_ha-334765-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m03:/home/docker/cp-test.txt ha-334765-m04:/home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test_ha-334765-m03_ha-334765-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp testdata/cp-test.txt ha-334765-m04:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3524304278/001/cp-test_ha-334765-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt ha-334765:/home/docker/cp-test_ha-334765-m04_ha-334765.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765 "sudo cat /home/docker/cp-test_ha-334765-m04_ha-334765.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt ha-334765-m02:/home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m02 "sudo cat /home/docker/cp-test_ha-334765-m04_ha-334765-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 cp ha-334765-m04:/home/docker/cp-test.txt ha-334765-m03:/home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 ssh -n ha-334765-m03 "sudo cat /home/docker/cp-test_ha-334765-m04_ha-334765-m03.txt"
--- PASS: TestMultiControlPlane/serial/CopyFile (19.14s)
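
Editor's note: the CopyFile matrix above is mechanical. For every node, `minikube cp` fans the test file host-to-node, node-to-host, and node-to-every-other-node, and each hop is verified with `ssh -n <node> sudo cat`. One hop of that fan-out, extracted as a sketch with the exact paths from the log:

package main

import (
	"fmt"
	"os/exec"
)

func run(args ...string) string {
	out, err := exec.Command("out/minikube-linux-arm64",
		append([]string{"-p", "ha-334765"}, args...)...).CombinedOutput()
	if err != nil {
		panic(fmt.Sprintf("%v\n%s", err, out))
	}
	return string(out)
}

func main() {
	// host -> primary node, then primary -> m02, verifying the final copy.
	run("cp", "testdata/cp-test.txt", "ha-334765:/home/docker/cp-test.txt")
	run("cp", "ha-334765:/home/docker/cp-test.txt",
		"ha-334765-m02:/home/docker/cp-test_ha-334765_ha-334765-m02.txt")
	fmt.Print(run("ssh", "-n", "ha-334765-m02",
		"sudo cat /home/docker/cp-test_ha-334765_ha-334765-m02.txt"))
}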

                                                
                                    
TestMultiControlPlane/serial/StopSecondaryNode (12.76s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopSecondaryNode
ha_test.go:363: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 node stop m02 -v=7 --alsologtostderr
ha_test.go:363: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 node stop m02 -v=7 --alsologtostderr: (12.030673765s)
ha_test.go:369: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:369: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr: exit status 7 (726.069336ms)

                                                
                                                
-- stdout --
	ha-334765
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-334765-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-334765-m03
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-334765-m04
	type: Worker
	host: Running
	kubelet: Running
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 10:56:07.435787 1430478 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:56:07.435997 1430478 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:56:07.436025 1430478 out.go:358] Setting ErrFile to fd 2...
	I0916 10:56:07.436044 1430478 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:56:07.436360 1430478 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 10:56:07.436611 1430478 out.go:352] Setting JSON to false
	I0916 10:56:07.436735 1430478 mustload.go:65] Loading cluster: ha-334765
	I0916 10:56:07.437334 1430478 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 10:56:07.437358 1430478 notify.go:220] Checking for updates...
	I0916 10:56:07.437375 1430478 status.go:255] checking status of ha-334765 ...
	I0916 10:56:07.437999 1430478 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 10:56:07.458269 1430478 status.go:330] ha-334765 host status = "Running" (err=<nil>)
	I0916 10:56:07.458413 1430478 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:56:07.458786 1430478 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765
	I0916 10:56:07.490196 1430478 host.go:66] Checking if "ha-334765" exists ...
	I0916 10:56:07.490506 1430478 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:07.490563 1430478 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765
	I0916 10:56:07.511261 1430478 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34618 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765/id_rsa Username:docker}
	I0916 10:56:07.609524 1430478 ssh_runner.go:195] Run: systemctl --version
	I0916 10:56:07.614792 1430478 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:56:07.628474 1430478 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:56:07.689751 1430478 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:4 ContainersRunning:3 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:53 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:56:07.678546204 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:56:07.690402 1430478 kubeconfig.go:125] found "ha-334765" server: "https://192.168.49.254:8443"
	I0916 10:56:07.690439 1430478 api_server.go:166] Checking apiserver status ...
	I0916 10:56:07.690492 1430478 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:56:07.702435 1430478 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1387/cgroup
	I0916 10:56:07.712147 1430478 api_server.go:182] apiserver freezer: "13:freezer:/docker/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/crio/crio-42f82617ee823d573b9d4f28daf99d7e25b6909d4243e3187869a02bdce9fdff"
	I0916 10:56:07.712222 1430478 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/471d2d625f18ea254879cc15bae69f2fa706198361173916de05b257110d78a5/crio/crio-42f82617ee823d573b9d4f28daf99d7e25b6909d4243e3187869a02bdce9fdff/freezer.state
	I0916 10:56:07.721613 1430478 api_server.go:204] freezer state: "THAWED"
	I0916 10:56:07.721642 1430478 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0916 10:56:07.729331 1430478 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0916 10:56:07.729361 1430478 status.go:422] ha-334765 apiserver status = Running (err=<nil>)
	I0916 10:56:07.729372 1430478 status.go:257] ha-334765 status: &{Name:ha-334765 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:56:07.729412 1430478 status.go:255] checking status of ha-334765-m02 ...
	I0916 10:56:07.729744 1430478 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 10:56:07.747404 1430478 status.go:330] ha-334765-m02 host status = "Stopped" (err=<nil>)
	I0916 10:56:07.747435 1430478 status.go:343] host is not running, skipping remaining checks
	I0916 10:56:07.747444 1430478 status.go:257] ha-334765-m02 status: &{Name:ha-334765-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:56:07.747465 1430478 status.go:255] checking status of ha-334765-m03 ...
	I0916 10:56:07.747788 1430478 cli_runner.go:164] Run: docker container inspect ha-334765-m03 --format={{.State.Status}}
	I0916 10:56:07.763838 1430478 status.go:330] ha-334765-m03 host status = "Running" (err=<nil>)
	I0916 10:56:07.763871 1430478 host.go:66] Checking if "ha-334765-m03" exists ...
	I0916 10:56:07.764176 1430478 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m03
	I0916 10:56:07.779501 1430478 host.go:66] Checking if "ha-334765-m03" exists ...
	I0916 10:56:07.779821 1430478 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:07.779869 1430478 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m03
	I0916 10:56:07.796296 1430478 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34628 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m03/id_rsa Username:docker}
	I0916 10:56:07.890316 1430478 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:56:07.903889 1430478 kubeconfig.go:125] found "ha-334765" server: "https://192.168.49.254:8443"
	I0916 10:56:07.903921 1430478 api_server.go:166] Checking apiserver status ...
	I0916 10:56:07.903962 1430478 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:56:07.914845 1430478 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1314/cgroup
	I0916 10:56:07.925507 1430478 api_server.go:182] apiserver freezer: "13:freezer:/docker/75c344efead1f7369938d66fd539ba803424cdff1fe321451a0b75f250362b53/crio/crio-a927a159b97a7f3a036dc367353cf8f215a5f6c7cb6d3a3296b6035b022dee53"
	I0916 10:56:07.925585 1430478 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/75c344efead1f7369938d66fd539ba803424cdff1fe321451a0b75f250362b53/crio/crio-a927a159b97a7f3a036dc367353cf8f215a5f6c7cb6d3a3296b6035b022dee53/freezer.state
	I0916 10:56:07.934216 1430478 api_server.go:204] freezer state: "THAWED"
	I0916 10:56:07.934250 1430478 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0916 10:56:07.942141 1430478 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0916 10:56:07.942171 1430478 status.go:422] ha-334765-m03 apiserver status = Running (err=<nil>)
	I0916 10:56:07.942181 1430478 status.go:257] ha-334765-m03 status: &{Name:ha-334765-m03 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:56:07.942198 1430478 status.go:255] checking status of ha-334765-m04 ...
	I0916 10:56:07.942501 1430478 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 10:56:07.959702 1430478 status.go:330] ha-334765-m04 host status = "Running" (err=<nil>)
	I0916 10:56:07.959728 1430478 host.go:66] Checking if "ha-334765-m04" exists ...
	I0916 10:56:07.960052 1430478 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-334765-m04
	I0916 10:56:07.979030 1430478 host.go:66] Checking if "ha-334765-m04" exists ...
	I0916 10:56:07.979367 1430478 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:07.979415 1430478 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-334765-m04
	I0916 10:56:07.997620 1430478 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34633 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/ha-334765-m04/id_rsa Username:docker}
	I0916 10:56:08.093863 1430478 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:56:08.105084 1430478 status.go:257] ha-334765-m04 status: &{Name:ha-334765-m04 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiControlPlane/serial/StopSecondaryNode (12.76s)
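
Editor's note: two details in this block reward a closer look. First, the stderr shows how `status` decides "apiserver: Running": it pgreps kube-apiserver, reads the process's freezer cgroup, requires freezer.state to be THAWED, and only then probes https://192.168.49.254:8443/healthz. Second, the exit status 7 is not an error code but a bit field; minikube's status help describes it as 1 (minikube NOK) + 2 (cluster NOK) + 4 (Kubernetes NOK), which matches the fully stopped m02 above. Treat that bit mapping as our reading of the CLI help rather than something printed in this report. A sketch of decoding it:

package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	err := exec.Command("out/minikube-linux-arm64", "-p", "ha-334765",
		"status", "-v=7", "--alsologtostderr").Run()
	code := 0
	var exitErr *exec.ExitError
	if errors.As(err, &exitErr) {
		code = exitErr.ExitCode()
	} else if err != nil {
		panic(err) // the binary could not be started at all
	}
	// Bit assignments assumed from minikube's status help text.
	fmt.Printf("minikube NOK=%v cluster NOK=%v kubernetes NOK=%v\n",
		code&1 != 0, code&2 != 0, code&4 != 0)
}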

                                                
                                    
TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.57s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.57s)

                                                
                                    
TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (3.31s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
ha_test.go:281: (dbg) Done: out/minikube-linux-arm64 profile list --output json: (3.306374678s)
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (3.31s)

                                                
                                    
TestMultiControlPlane/serial/RestartClusterKeepsNodes (188.27s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartClusterKeepsNodes
ha_test.go:456: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-334765 -v=7 --alsologtostderr
ha_test.go:462: (dbg) Run:  out/minikube-linux-arm64 stop -p ha-334765 -v=7 --alsologtostderr
E0916 10:57:01.356858 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:462: (dbg) Done: out/minikube-linux-arm64 stop -p ha-334765 -v=7 --alsologtostderr: (21.204300449s)
ha_test.go:467: (dbg) Run:  out/minikube-linux-arm64 start -p ha-334765 --wait=true -v=7 --alsologtostderr
E0916 10:58:53.341932 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:59:17.495409 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:59:45.199017 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:467: (dbg) Done: out/minikube-linux-arm64 start -p ha-334765 --wait=true -v=7 --alsologtostderr: (2m46.879687786s)
ha_test.go:472: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-334765
--- PASS: TestMultiControlPlane/serial/RestartClusterKeepsNodes (188.27s)
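
Editor's note: the point of this test is an invariant rather than any single command: the `node list` taken at ha_test.go:456 should survive a full `stop`/`start --wait=true` cycle unchanged (ha_test.go:472 re-reads it after the restart). A sketch of the same check as we read the test's intent; the byte-for-byte comparison is our simplification:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

// nodeList captures `node list` output for the ha-334765 profile.
func nodeList() []byte {
	out, err := exec.Command("out/minikube-linux-arm64", "node", "list",
		"-p", "ha-334765").Output()
	if err != nil {
		panic(err)
	}
	return out
}

func main() {
	before := nodeList()
	for _, args := range [][]string{
		{"stop", "-p", "ha-334765", "-v=7", "--alsologtostderr"},
		{"start", "-p", "ha-334765", "--wait=true", "-v=7", "--alsologtostderr"},
	} {
		if err := exec.Command("out/minikube-linux-arm64", args...).Run(); err != nil {
			panic(err)
		}
	}
	if !bytes.Equal(before, nodeList()) {
		panic("node list changed across stop/start")
	}
	fmt.Println("node list preserved across restart")
}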

                                                
                                    
TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.56s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.56s)

                                                
                                    
TestMultiControlPlane/serial/StopCluster (35.89s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopCluster
ha_test.go:531: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 stop -v=7 --alsologtostderr
ha_test.go:531: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 stop -v=7 --alsologtostderr: (35.763873063s)
ha_test.go:537: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:537: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr: exit status 7 (125.921198ms)

-- stdout --
	ha-334765
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-334765-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-334765-m04
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
** stderr ** 
	I0916 11:00:43.482590 1445359 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:00:43.482793 1445359 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:00:43.482825 1445359 out.go:358] Setting ErrFile to fd 2...
	I0916 11:00:43.482848 1445359 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:00:43.483134 1445359 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:00:43.483372 1445359 out.go:352] Setting JSON to false
	I0916 11:00:43.483448 1445359 mustload.go:65] Loading cluster: ha-334765
	I0916 11:00:43.483510 1445359 notify.go:220] Checking for updates...
	I0916 11:00:43.483925 1445359 config.go:182] Loaded profile config "ha-334765": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:00:43.483948 1445359 status.go:255] checking status of ha-334765 ...
	I0916 11:00:43.484521 1445359 cli_runner.go:164] Run: docker container inspect ha-334765 --format={{.State.Status}}
	I0916 11:00:43.503016 1445359 status.go:330] ha-334765 host status = "Stopped" (err=<nil>)
	I0916 11:00:43.503043 1445359 status.go:343] host is not running, skipping remaining checks
	I0916 11:00:43.503050 1445359 status.go:257] ha-334765 status: &{Name:ha-334765 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:00:43.503075 1445359 status.go:255] checking status of ha-334765-m02 ...
	I0916 11:00:43.503423 1445359 cli_runner.go:164] Run: docker container inspect ha-334765-m02 --format={{.State.Status}}
	I0916 11:00:43.535382 1445359 status.go:330] ha-334765-m02 host status = "Stopped" (err=<nil>)
	I0916 11:00:43.535403 1445359 status.go:343] host is not running, skipping remaining checks
	I0916 11:00:43.535411 1445359 status.go:257] ha-334765-m02 status: &{Name:ha-334765-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:00:43.535432 1445359 status.go:255] checking status of ha-334765-m04 ...
	I0916 11:00:43.535754 1445359 cli_runner.go:164] Run: docker container inspect ha-334765-m04 --format={{.State.Status}}
	I0916 11:00:43.554651 1445359 status.go:330] ha-334765-m04 host status = "Stopped" (err=<nil>)
	I0916 11:00:43.554674 1445359 status.go:343] host is not running, skipping remaining checks
	I0916 11:00:43.554681 1445359 status.go:257] ha-334765-m04 status: &{Name:ha-334765-m04 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiControlPlane/serial/StopCluster (35.89s)
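
The exit status 7 above is the expected result for a fully stopped cluster: minikube's status command encodes component state as a bitmask in its exit code (host not running = 1, cluster not running = 2, kubernetes not running = 4, per minikube's status.go), so 7 means all three are down. A minimal Go sketch of decoding that convention, assuming the same flag layout and the binary path used in this run:

// Sketch: run `minikube status` for the stopped profile above and decode the
// exit-code bitmask. The 1/2/4 flag values mirror minikube's status.go and
// should be treated as an assumption for other versions.
package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	err := exec.Command("out/minikube-linux-arm64", "-p", "ha-334765", "status").Run()
	code := 0
	var ee *exec.ExitError
	if errors.As(err, &ee) {
		code = ee.ExitCode()
	} else if err != nil {
		panic(err) // the binary failed to run at all
	}
	fmt.Println("host stopped:      ", code&1 != 0)
	fmt.Println("cluster stopped:   ", code&2 != 0)
	fmt.Println("kubernetes stopped:", code&4 != 0) // exit 7 => all three
}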

TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.54s)

=== RUN   TestMultiControlPlane/serial/DegradedAfterClusterRestart
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.54s)

TestMultiControlPlane/serial/AddSecondaryNode (71.83s)

=== RUN   TestMultiControlPlane/serial/AddSecondaryNode
ha_test.go:605: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-334765 --control-plane -v=7 --alsologtostderr
ha_test.go:605: (dbg) Done: out/minikube-linux-arm64 node add -p ha-334765 --control-plane -v=7 --alsologtostderr: (1m10.804369605s)
ha_test.go:611: (dbg) Run:  out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr
ha_test.go:611: (dbg) Done: out/minikube-linux-arm64 -p ha-334765 status -v=7 --alsologtostderr: (1.028233907s)
--- PASS: TestMultiControlPlane/serial/AddSecondaryNode (71.83s)

TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.75s)

=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.75s)

TestJSONOutput/start/Command (75.59s)

=== RUN   TestJSONOutput/start/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-804232 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=crio
E0916 11:03:53.341881 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 11:04:17.495137 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 start -p json-output-804232 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=crio: (1m15.589583778s)
--- PASS: TestJSONOutput/start/Command (75.59s)

TestJSONOutput/start/Audit (0s)

=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/pause/Command (0.75s)

=== RUN   TestJSONOutput/pause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 pause -p json-output-804232 --output=json --user=testUser
--- PASS: TestJSONOutput/pause/Command (0.75s)

TestJSONOutput/pause/Audit (0s)

=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/unpause/Command (0.66s)

=== RUN   TestJSONOutput/unpause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 unpause -p json-output-804232 --output=json --user=testUser
--- PASS: TestJSONOutput/unpause/Command (0.66s)

TestJSONOutput/unpause/Audit (0s)

=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/stop/Command (6.12s)

=== RUN   TestJSONOutput/stop/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 stop -p json-output-804232 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 stop -p json-output-804232 --output=json --user=testUser: (6.122226827s)
--- PASS: TestJSONOutput/stop/Command (6.12s)

TestJSONOutput/stop/Audit (0s)

=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

TestErrorJSONOutput (0.23s)

=== RUN   TestErrorJSONOutput
json_output_test.go:160: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-error-223589 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:160: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p json-output-error-223589 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (82.385496ms)

-- stdout --
	{"specversion":"1.0","id":"e339ed1a-6636-4efa-8c39-da41f77d6db1","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[json-output-error-223589] minikube v1.34.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"fdd4b5d2-96c8-4761-bf9b-0d31ae8fc522","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=19651"}}
	{"specversion":"1.0","id":"2370ef1e-370c-428f-9244-6cbeb0628dfb","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"ccd545ce-365e-4385-a84f-d7b6474cf89d","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig"}}
	{"specversion":"1.0","id":"ee7f91b6-5dc6-4f28-a79b-0bd4df9a5d84","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube"}}
	{"specversion":"1.0","id":"29eb7eea-cd95-420e-b67e-5d1aa076d6db","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"2e69c5ff-0ced-49e9-98b3-fab304c1dd2c","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"86b3bbe3-4bdd-453d-a3dc-014ff85a13e3","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on linux/arm64","name":"DRV_UNSUPPORTED_OS","url":""}}

-- /stdout --
helpers_test.go:175: Cleaning up "json-output-error-223589" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p json-output-error-223589
--- PASS: TestErrorJSONOutput (0.23s)
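
Each stdout line above is a CloudEvents-style JSON object, and the test asserts that --driver=fail ends in an event of type io.k8s.sigs.minikube.error carrying exit code 56 (DRV_UNSUPPORTED_OS). A short sketch of consuming such a stream; only the fields visible in this log are modeled, and the rest of the schema is not assumed:

// Sketch: scan a minikube --output=json event stream (piped to stdin) and
// surface error events like the DRV_UNSUPPORTED_OS one above.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

type event struct {
	Type string            `json:"type"`
	Data map[string]string `json:"data"`
}

func main() {
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		var ev event
		if json.Unmarshal(sc.Bytes(), &ev) != nil {
			continue // ignore non-JSON lines
		}
		if ev.Type == "io.k8s.sigs.minikube.error" {
			fmt.Printf("%s (exit %s): %s\n", ev.Data["name"], ev.Data["exitcode"], ev.Data["message"])
		}
	}
}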

TestKicCustomNetwork/create_custom_network (38.37s)

=== RUN   TestKicCustomNetwork/create_custom_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-250632 --network=
E0916 11:05:16.406615 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-250632 --network=: (36.24640379s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-250632" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-250632
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-250632: (2.09424255s)
--- PASS: TestKicCustomNetwork/create_custom_network (38.37s)

TestKicCustomNetwork/use_default_bridge_network (32.52s)

=== RUN   TestKicCustomNetwork/use_default_bridge_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-110300 --network=bridge
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-110300 --network=bridge: (30.503726371s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-110300" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-110300
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-110300: (1.99236545s)
--- PASS: TestKicCustomNetwork/use_default_bridge_network (32.52s)

TestKicExistingNetwork (31.14s)

=== RUN   TestKicExistingNetwork
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
kic_custom_network_test.go:93: (dbg) Run:  out/minikube-linux-arm64 start -p existing-network-957860 --network=existing-network
kic_custom_network_test.go:93: (dbg) Done: out/minikube-linux-arm64 start -p existing-network-957860 --network=existing-network: (29.037729846s)
helpers_test.go:175: Cleaning up "existing-network-957860" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p existing-network-957860
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p existing-network-957860: (1.949283705s)
--- PASS: TestKicExistingNetwork (31.14s)

TestKicCustomSubnet (38.29s)

=== RUN   TestKicCustomSubnet
kic_custom_network_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p custom-subnet-711939 --subnet=192.168.60.0/24
kic_custom_network_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p custom-subnet-711939 --subnet=192.168.60.0/24: (36.057478679s)
kic_custom_network_test.go:161: (dbg) Run:  docker network inspect custom-subnet-711939 --format "{{(index .IPAM.Config 0).Subnet}}"
helpers_test.go:175: Cleaning up "custom-subnet-711939" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p custom-subnet-711939
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p custom-subnet-711939: (2.202131286s)
--- PASS: TestKicCustomSubnet (38.29s)
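
The subnet assertion here rests on the docker network inspect template shown at kic_custom_network_test.go:161. A sketch reproducing the same check outside the harness; the profile name is the throwaway one generated for this run:

// Sketch: verify the kic network's subnet the way the test does, using the
// same --format template. "custom-subnet-711939" exists only in this run.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	out, err := exec.Command("docker", "network", "inspect", "custom-subnet-711939",
		"--format", "{{(index .IPAM.Config 0).Subnet}}").Output()
	if err != nil {
		panic(err)
	}
	if got := strings.TrimSpace(string(out)); got != "192.168.60.0/24" {
		fmt.Println("subnet mismatch:", got)
	} else {
		fmt.Println("subnet matches --subnet:", got)
	}
}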

TestKicStaticIP (36.59s)

=== RUN   TestKicStaticIP
kic_custom_network_test.go:132: (dbg) Run:  out/minikube-linux-arm64 start -p static-ip-865673 --static-ip=192.168.200.200
kic_custom_network_test.go:132: (dbg) Done: out/minikube-linux-arm64 start -p static-ip-865673 --static-ip=192.168.200.200: (34.083576542s)
kic_custom_network_test.go:138: (dbg) Run:  out/minikube-linux-arm64 -p static-ip-865673 ip
helpers_test.go:175: Cleaning up "static-ip-865673" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p static-ip-865673
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p static-ip-865673: (2.127728334s)
--- PASS: TestKicStaticIP (36.59s)

TestMainNoArgs (0.05s)

=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-linux-arm64
--- PASS: TestMainNoArgs (0.05s)

TestMinikubeProfile (68.89s)

=== RUN   TestMinikubeProfile
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p first-583372 --driver=docker  --container-runtime=crio
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p first-583372 --driver=docker  --container-runtime=crio: (30.269636478s)
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p second-586810 --driver=docker  --container-runtime=crio
E0916 11:08:53.341937 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p second-586810 --driver=docker  --container-runtime=crio: (33.265651159s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile first-583372
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile second-586810
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
helpers_test.go:175: Cleaning up "second-586810" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p second-586810
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p second-586810: (2.068207726s)
helpers_test.go:175: Cleaning up "first-583372" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p first-583372
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p first-583372: (1.973481685s)
--- PASS: TestMinikubeProfile (68.89s)

TestMountStart/serial/StartWithMountFirst (7.29s)

=== RUN   TestMountStart/serial/StartWithMountFirst
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-1-434131 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=crio
E0916 11:09:17.495408 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-1-434131 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=crio: (6.290578799s)
--- PASS: TestMountStart/serial/StartWithMountFirst (7.29s)

TestMountStart/serial/VerifyMountFirst (0.27s)

=== RUN   TestMountStart/serial/VerifyMountFirst
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-1-434131 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountFirst (0.27s)

TestMountStart/serial/StartWithMountSecond (6.64s)

=== RUN   TestMountStart/serial/StartWithMountSecond
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-436346 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=crio
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-436346 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=crio: (5.633367465s)
--- PASS: TestMountStart/serial/StartWithMountSecond (6.64s)

TestMountStart/serial/VerifyMountSecond (0.26s)

=== RUN   TestMountStart/serial/VerifyMountSecond
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-436346 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountSecond (0.26s)

TestMountStart/serial/DeleteFirst (1.61s)

=== RUN   TestMountStart/serial/DeleteFirst
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p mount-start-1-434131 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p mount-start-1-434131 --alsologtostderr -v=5: (1.613070706s)
--- PASS: TestMountStart/serial/DeleteFirst (1.61s)

TestMountStart/serial/VerifyMountPostDelete (0.26s)

=== RUN   TestMountStart/serial/VerifyMountPostDelete
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-436346 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostDelete (0.26s)

TestMountStart/serial/Stop (1.21s)

=== RUN   TestMountStart/serial/Stop
mount_start_test.go:155: (dbg) Run:  out/minikube-linux-arm64 stop -p mount-start-2-436346
mount_start_test.go:155: (dbg) Done: out/minikube-linux-arm64 stop -p mount-start-2-436346: (1.213673203s)
--- PASS: TestMountStart/serial/Stop (1.21s)

TestMountStart/serial/RestartStopped (8.55s)

=== RUN   TestMountStart/serial/RestartStopped
mount_start_test.go:166: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-436346
mount_start_test.go:166: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-436346: (7.552769505s)
--- PASS: TestMountStart/serial/RestartStopped (8.55s)

TestMountStart/serial/VerifyMountPostStop (0.26s)

=== RUN   TestMountStart/serial/VerifyMountPostStop
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-436346 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostStop (0.26s)

TestMultiNode/serial/FreshStart2Nodes (108.01s)

=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:96: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-654612 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=crio
E0916 11:10:40.561497 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:96: (dbg) Done: out/minikube-linux-arm64 start -p multinode-654612 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=crio: (1m47.494471726s)
multinode_test.go:102: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
--- PASS: TestMultiNode/serial/FreshStart2Nodes (108.01s)

TestMultiNode/serial/DeployApp2Nodes (6.39s)

=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:493: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:498: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- rollout status deployment/busybox
multinode_test.go:498: (dbg) Done: out/minikube-linux-arm64 kubectl -p multinode-654612 -- rollout status deployment/busybox: (4.432360854s)
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:528: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-rdtjw -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-sfkxt -- nslookup kubernetes.io
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-rdtjw -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-sfkxt -- nslookup kubernetes.default
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-rdtjw -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-sfkxt -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiNode/serial/DeployApp2Nodes (6.39s)

TestMultiNode/serial/PingHostFrom2Pods (1s)

=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:564: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-rdtjw -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-rdtjw -- sh -c "ping -c 1 192.168.67.1"
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-sfkxt -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-654612 -- exec busybox-7dff88458-sfkxt -- sh -c "ping -c 1 192.168.67.1"
--- PASS: TestMultiNode/serial/PingHostFrom2Pods (1.00s)
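
The nslookup | awk 'NR==5' | cut -d' ' -f3 pipeline above pulls the resolved IP of host.minikube.internal out of a fixed line and field position, which is brittle if the resolver's output format shifts. Not what the test does, but a label-based parse is one alternative, sketched here under the assumption of BusyBox-style "Name:"/"Address:" output:

// Sketch: extract the address for a resolved name from nslookup-style output
// (piped to stdin) by matching labels instead of line numbers.
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sawName := false
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if strings.HasPrefix(line, "Name:") {
			sawName = true // addresses before this belong to the DNS server
			continue
		}
		if sawName && strings.HasPrefix(line, "Address:") {
			fmt.Println(strings.TrimSpace(strings.TrimPrefix(line, "Address:")))
			return
		}
	}
}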

TestMultiNode/serial/AddNode (29.2s)

=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:121: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-654612 -v 3 --alsologtostderr
multinode_test.go:121: (dbg) Done: out/minikube-linux-arm64 node add -p multinode-654612 -v 3 --alsologtostderr: (28.505594784s)
multinode_test.go:127: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
--- PASS: TestMultiNode/serial/AddNode (29.20s)

TestMultiNode/serial/ProfileList (0.34s)

=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:143: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiNode/serial/ProfileList (0.34s)

TestMultiNode/serial/CopyFile (10.2s)

=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:184: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --output json --alsologtostderr
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp testdata/cp-test.txt multinode-654612:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612:/home/docker/cp-test.txt multinode-654612-m02:/home/docker/cp-test_multinode-654612_multinode-654612-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test_multinode-654612_multinode-654612-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612:/home/docker/cp-test.txt multinode-654612-m03:/home/docker/cp-test_multinode-654612_multinode-654612-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test_multinode-654612_multinode-654612-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp testdata/cp-test.txt multinode-654612-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt multinode-654612:/home/docker/cp-test_multinode-654612-m02_multinode-654612.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test_multinode-654612-m02_multinode-654612.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m02:/home/docker/cp-test.txt multinode-654612-m03:/home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test_multinode-654612-m02_multinode-654612-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp testdata/cp-test.txt multinode-654612-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile4098428863/001/cp-test_multinode-654612-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt multinode-654612:/home/docker/cp-test_multinode-654612-m03_multinode-654612.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612 "sudo cat /home/docker/cp-test_multinode-654612-m03_multinode-654612.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 cp multinode-654612-m03:/home/docker/cp-test.txt multinode-654612-m02:/home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 ssh -n multinode-654612-m02 "sudo cat /home/docker/cp-test_multinode-654612-m03_multinode-654612-m02.txt"
--- PASS: TestMultiNode/serial/CopyFile (10.20s)

TestMultiNode/serial/StopNode (2.34s)

=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:248: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 node stop m03
multinode_test.go:248: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 node stop m03: (1.243898116s)
multinode_test.go:254: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status
multinode_test.go:254: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-654612 status: exit status 7 (544.183734ms)

-- stdout --
	multinode-654612
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-654612-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-654612-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
multinode_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
multinode_test.go:261: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr: exit status 7 (551.233539ms)

-- stdout --
	multinode-654612
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-654612-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-654612-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
** stderr ** 
	I0916 11:12:19.754373 1499114 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:12:19.754801 1499114 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:19.754816 1499114 out.go:358] Setting ErrFile to fd 2...
	I0916 11:12:19.754823 1499114 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:19.755182 1499114 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:12:19.755450 1499114 out.go:352] Setting JSON to false
	I0916 11:12:19.755502 1499114 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:12:19.755577 1499114 notify.go:220] Checking for updates...
	I0916 11:12:19.756647 1499114 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:12:19.756670 1499114 status.go:255] checking status of multinode-654612 ...
	I0916 11:12:19.757313 1499114 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:12:19.775900 1499114 status.go:330] multinode-654612 host status = "Running" (err=<nil>)
	I0916 11:12:19.775925 1499114 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:12:19.776229 1499114 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612
	I0916 11:12:19.808389 1499114 host.go:66] Checking if "multinode-654612" exists ...
	I0916 11:12:19.808746 1499114 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:12:19.808790 1499114 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612
	I0916 11:12:19.827111 1499114 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34738 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612/id_rsa Username:docker}
	I0916 11:12:19.922295 1499114 ssh_runner.go:195] Run: systemctl --version
	I0916 11:12:19.926730 1499114 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:12:19.938979 1499114 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:12:20.006997 1499114 info.go:266] docker info: {ID:5FDH:SA5P:5GCT:NLAS:B73P:SGDQ:PBG5:UBVH:UZY3:RXGO:CI7S:WAIH Containers:3 ContainersRunning:2 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:12:19.990157834 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214839296 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-21-244 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:12:20.007664 1499114 kubeconfig.go:125] found "multinode-654612" server: "https://192.168.67.2:8443"
	I0916 11:12:20.007704 1499114 api_server.go:166] Checking apiserver status ...
	I0916 11:12:20.007809 1499114 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:12:20.023897 1499114 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1383/cgroup
	I0916 11:12:20.035822 1499114 api_server.go:182] apiserver freezer: "13:freezer:/docker/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/crio/crio-00cc927e5dcd6bba6542407e57d794c19b8cdd3c3a3e876481e838d3d1bb32ca"
	I0916 11:12:20.035903 1499114 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/402497514f0b8b3453fe3f147b28574766ee05bfb7c084c9f8550757726f30cd/crio/crio-00cc927e5dcd6bba6542407e57d794c19b8cdd3c3a3e876481e838d3d1bb32ca/freezer.state
	I0916 11:12:20.048181 1499114 api_server.go:204] freezer state: "THAWED"
	I0916 11:12:20.048225 1499114 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0916 11:12:20.056191 1499114 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
	I0916 11:12:20.056227 1499114 status.go:422] multinode-654612 apiserver status = Running (err=<nil>)
	I0916 11:12:20.056250 1499114 status.go:257] multinode-654612 status: &{Name:multinode-654612 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:12:20.056270 1499114 status.go:255] checking status of multinode-654612-m02 ...
	I0916 11:12:20.056599 1499114 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:12:20.080826 1499114 status.go:330] multinode-654612-m02 host status = "Running" (err=<nil>)
	I0916 11:12:20.080858 1499114 host.go:66] Checking if "multinode-654612-m02" exists ...
	I0916 11:12:20.081188 1499114 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-654612-m02
	I0916 11:12:20.100144 1499114 host.go:66] Checking if "multinode-654612-m02" exists ...
	I0916 11:12:20.100498 1499114 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:12:20.100567 1499114 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-654612-m02
	I0916 11:12:20.118224 1499114 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34743 SSHKeyPath:/home/jenkins/minikube-integration/19651-1378450/.minikube/machines/multinode-654612-m02/id_rsa Username:docker}
	I0916 11:12:20.214038 1499114 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:12:20.226064 1499114 status.go:257] multinode-654612-m02 status: &{Name:multinode-654612-m02 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:12:20.226100 1499114 status.go:255] checking status of multinode-654612-m03 ...
	I0916 11:12:20.226475 1499114 cli_runner.go:164] Run: docker container inspect multinode-654612-m03 --format={{.State.Status}}
	I0916 11:12:20.249479 1499114 status.go:330] multinode-654612-m03 host status = "Stopped" (err=<nil>)
	I0916 11:12:20.249504 1499114 status.go:343] host is not running, skipping remaining checks
	I0916 11:12:20.249511 1499114 status.go:257] multinode-654612-m03 status: &{Name:multinode-654612-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopNode (2.34s)
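
The stderr trace above spells out the whole apiserver check: find the kube-apiserver process, confirm its freezer cgroup is THAWED, then probe https://192.168.67.2:8443/healthz and expect "200 ok". A sketch of just the probe step; the address is this run's cluster IP, and skipping certificate verification is an assumption standing in for minikube's CA handling (/healthz is typically readable unauthenticated via the system:public-info-viewer binding):

// Sketch: probe the apiserver health endpoint, mirroring the final step of
// the status check logged above.
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // assumption: skip CA wiring
		},
	}
	resp, err := client.Get("https://192.168.67.2:8443/healthz")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("healthz: %d %s\n", resp.StatusCode, body) // healthy => 200 ok
}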

TestMultiNode/serial/RestartKeepsNodes (137.88s)

=== RUN   TestMultiNode/serial/RestartKeepsNodes
multinode_test.go:314: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-654612
multinode_test.go:321: (dbg) Run:  out/minikube-linux-arm64 stop -p multinode-654612
multinode_test.go:321: (dbg) Done: out/minikube-linux-arm64 stop -p multinode-654612: (24.915574254s)
multinode_test.go:326: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-654612 --wait=true -v=8 --alsologtostderr
E0916 11:13:53.341699 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 11:14:17.495606 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:326: (dbg) Done: out/minikube-linux-arm64 start -p multinode-654612 --wait=true -v=8 --alsologtostderr: (1m52.812694582s)
multinode_test.go:331: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-654612
--- PASS: TestMultiNode/serial/RestartKeepsNodes (137.88s)

TestMultiNode/serial/StopMultiNode (23.97s)

=== RUN   TestMultiNode/serial/StopMultiNode
multinode_test.go:345: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 stop
multinode_test.go:345: (dbg) Done: out/minikube-linux-arm64 -p multinode-654612 stop: (23.773644001s)
multinode_test.go:351: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status
multinode_test.go:351: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-654612 status: exit status 7 (89.077118ms)

-- stdout --
	multinode-654612
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-654612-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
multinode_test.go:358: (dbg) Run:  out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr
multinode_test.go:358: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-654612 status --alsologtostderr: exit status 7 (104.188648ms)

-- stdout --
	multinode-654612
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-654612-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
** stderr ** 
	I0916 11:15:24.843810 1507559 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:15:24.843932 1507559 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:15:24.843949 1507559 out.go:358] Setting ErrFile to fd 2...
	I0916 11:15:24.843977 1507559 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:15:24.844325 1507559 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-1378450/.minikube/bin
	I0916 11:15:24.844552 1507559 out.go:352] Setting JSON to false
	I0916 11:15:24.844579 1507559 mustload.go:65] Loading cluster: multinode-654612
	I0916 11:15:24.844906 1507559 notify.go:220] Checking for updates...
	I0916 11:15:24.845324 1507559 config.go:182] Loaded profile config "multinode-654612": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.31.1
	I0916 11:15:24.845373 1507559 status.go:255] checking status of multinode-654612 ...
	I0916 11:15:24.846361 1507559 cli_runner.go:164] Run: docker container inspect multinode-654612 --format={{.State.Status}}
	I0916 11:15:24.867592 1507559 status.go:330] multinode-654612 host status = "Stopped" (err=<nil>)
	I0916 11:15:24.867614 1507559 status.go:343] host is not running, skipping remaining checks
	I0916 11:15:24.867628 1507559 status.go:257] multinode-654612 status: &{Name:multinode-654612 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:15:24.867659 1507559 status.go:255] checking status of multinode-654612-m02 ...
	I0916 11:15:24.868063 1507559 cli_runner.go:164] Run: docker container inspect multinode-654612-m02 --format={{.State.Status}}
	I0916 11:15:24.890994 1507559 status.go:330] multinode-654612-m02 host status = "Stopped" (err=<nil>)
	I0916 11:15:24.891014 1507559 status.go:343] host is not running, skipping remaining checks
	I0916 11:15:24.891021 1507559 status.go:257] multinode-654612-m02 status: &{Name:multinode-654612-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopMultiNode (23.97s)

TestMultiNode/serial/ValidateNameConflict (38.83s)

=== RUN   TestMultiNode/serial/ValidateNameConflict
multinode_test.go:455: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-654612
multinode_test.go:464: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-654612-m02 --driver=docker  --container-runtime=crio
multinode_test.go:464: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p multinode-654612-m02 --driver=docker  --container-runtime=crio: exit status 14 (86.794849ms)

-- stdout --
	* [multinode-654612-m02] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

-- /stdout --
** stderr ** 
	! Profile name 'multinode-654612-m02' is duplicated with machine name 'multinode-654612-m02' in profile 'multinode-654612'
	X Exiting due to MK_USAGE: Profile name should be unique

** /stderr **
multinode_test.go:472: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-654612-m03 --driver=docker  --container-runtime=crio
multinode_test.go:472: (dbg) Done: out/minikube-linux-arm64 start -p multinode-654612-m03 --driver=docker  --container-runtime=crio: (36.289416744s)
multinode_test.go:479: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-654612
multinode_test.go:479: (dbg) Non-zero exit: out/minikube-linux-arm64 node add -p multinode-654612: exit status 80 (378.296972ms)

-- stdout --
	* Adding node m03 to cluster multinode-654612 as [worker]
	
	

-- /stdout --
** stderr ** 
	X Exiting due to GUEST_NODE_ADD: failed to add node: Node multinode-654612-m03 already exists in multinode-654612-m03 profile
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_node_040ea7097fd6ed71e65be9a474587f81f0ccd21d_1.log                    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

** /stderr **
multinode_test.go:484: (dbg) Run:  out/minikube-linux-arm64 delete -p multinode-654612-m03
multinode_test.go:484: (dbg) Done: out/minikube-linux-arm64 delete -p multinode-654612-m03: (2.027046822s)
--- PASS: TestMultiNode/serial/ValidateNameConflict (38.83s)
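
The duplicate-profile guard exercised above (exit status 14, MK_USAGE) is straightforward to reproduce outside the test harness. A minimal Go sketch under the assumptions visible in the run above (the relative binary path and the colliding profile name); this is an illustration, not the harness code itself:

	package main

	import (
		"errors"
		"fmt"
		"os/exec"
	)

	func main() {
		// multinode-654612-m02 collides with the machine name inside the
		// existing multinode-654612 profile, so start must refuse to run.
		cmd := exec.Command("out/minikube-linux-arm64", "start",
			"-p", "multinode-654612-m02",
			"--driver=docker", "--container-runtime=crio")
		out, err := cmd.CombinedOutput()
		var ee *exec.ExitError
		if errors.As(err, &ee) && ee.ExitCode() == 14 {
			// 14 is the MK_USAGE exit code seen in the stderr above.
			fmt.Printf("got expected usage error:\n%s", out)
			return
		}
		fmt.Printf("unexpected result: err=%v\n%s", err, out)
	}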

TestScheduledStopUnix (107.66s)

=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-linux-arm64 start -p scheduled-stop-561611 --memory=2048 --driver=docker  --container-runtime=crio
scheduled_stop_test.go:128: (dbg) Done: out/minikube-linux-arm64 start -p scheduled-stop-561611 --memory=2048 --driver=docker  --container-runtime=crio: (31.485176684s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-561611 --schedule 5m
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-linux-arm64 status --format={{.TimeToStop}} -p scheduled-stop-561611 -n scheduled-stop-561611
scheduled_stop_test.go:169: signal error was:  <nil>
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-561611 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-561611 --cancel-scheduled
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-561611 -n scheduled-stop-561611
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-561611
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-561611 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
E0916 11:18:53.342966 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/addons-936355/client.crt: no such file or directory" logger="UnhandledError"
E0916 11:19:17.495743 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-561611
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p scheduled-stop-561611: exit status 7 (74.778473ms)

-- stdout --
	scheduled-stop-561611
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	

-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-561611 -n scheduled-stop-561611
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-561611 -n scheduled-stop-561611: exit status 7 (68.144144ms)

-- stdout --
	Stopped

-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:175: Cleaning up "scheduled-stop-561611" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p scheduled-stop-561611
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p scheduled-stop-561611: (4.591441351s)
--- PASS: TestScheduledStopUnix (107.66s)
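
The schedule/cancel flow above keys off `minikube status` exit codes: once the scheduled stop fires, status exits with code 7 and `--format={{.Host}}` prints "Stopped". A rough polling sketch under those assumptions, reusing the binary path and profile name from the run above (the deadline is illustrative):

	package main

	import (
		"errors"
		"fmt"
		"os/exec"
		"time"
	)

	func main() {
		deadline := time.Now().Add(2 * time.Minute)
		for time.Now().Before(deadline) {
			out, err := exec.Command("out/minikube-linux-arm64", "status",
				"--format={{.Host}}", "-p", "scheduled-stop-561611").Output()
			var ee *exec.ExitError
			if errors.As(err, &ee) && ee.ExitCode() == 7 {
				fmt.Printf("host is: %s\n", out) // expect "Stopped"
				return
			}
			time.Sleep(5 * time.Second)
		}
		fmt.Println("timed out waiting for the scheduled stop to fire")
	}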

TestInsufficientStorage (10.75s)

=== RUN   TestInsufficientStorage
status_test.go:50: (dbg) Run:  out/minikube-linux-arm64 start -p insufficient-storage-227630 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=crio
status_test.go:50: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p insufficient-storage-227630 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=crio: exit status 26 (8.199050858s)

-- stdout --
	{"specversion":"1.0","id":"6d6a513f-9aae-4e3f-b14f-d7b8a54c3c27","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[insufficient-storage-227630] minikube v1.34.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"bd888335-730b-4d6a-b116-93ed6106c991","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=19651"}}
	{"specversion":"1.0","id":"33c12ff5-2f33-41fb-a7b8-a4c3cf767bd0","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"a6e5ba78-30f6-4d85-9927-487a3f94cdeb","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig"}}
	{"specversion":"1.0","id":"ac257242-745c-42d4-93d3-cca8b4e91267","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube"}}
	{"specversion":"1.0","id":"cd245064-e5e8-43d5-8b19-0e04dbb56cfc","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"4211a688-36fe-4ea2-b90d-540adfde305a","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"f52181b4-c584-4f52-9b74-8dcc31b2e1e3","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_STORAGE_CAPACITY=100"}}
	{"specversion":"1.0","id":"7a146647-8914-4678-96e8-becf2d356b5b","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_AVAILABLE_STORAGE=19"}}
	{"specversion":"1.0","id":"4232f2bf-c867-4115-897a-72feb21a0204","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"1","message":"Using the docker driver based on user configuration","name":"Selecting Driver","totalsteps":"19"}}
	{"specversion":"1.0","id":"677ffb99-cb58-4a86-881e-f0e1d88b8d0e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"Using Docker driver with root privileges"}}
	{"specversion":"1.0","id":"df416e44-6c2b-43b5-bf4b-0c10151d30f6","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"3","message":"Starting \"insufficient-storage-227630\" primary control-plane node in \"insufficient-storage-227630\" cluster","name":"Starting Node","totalsteps":"19"}}
	{"specversion":"1.0","id":"e4fa705b-69fd-44d1-a120-8dad1e1ca20b","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"5","message":"Pulling base image v0.0.45-1726358845-19644 ...","name":"Pulling Base Image","totalsteps":"19"}}
	{"specversion":"1.0","id":"f29d7422-50bd-44b1-bf6c-a2ce0bde238f","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"8","message":"Creating docker container (CPUs=2, Memory=2048MB) ...","name":"Creating Container","totalsteps":"19"}}
	{"specversion":"1.0","id":"df85b68b-cb67-49a5-a527-dde97b6a5928","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"Try one or more of the following to free up space on the device:\n\t\n\t\t\t1. Run \"docker system prune\" to remove unused Docker data (optionally with \"-a\")\n\t\t\t2. Increase the storage allocated to Docker for Desktop by clicking on:\n\t\t\t\tDocker icon \u003e Preferences \u003e Resources \u003e Disk Image Size\n\t\t\t3. Run \"minikube ssh -- docker system prune\" if using the Docker container runtime","exitcode":"26","issues":"https://github.com/kubernetes/minikube/issues/9024","message":"Docker is out of disk space! (/var is at 100% of capacity). You can pass '--force' to skip this check.","name":"RSRC_DOCKER_STORAGE","url":""}}

-- /stdout --
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-227630 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-227630 --output=json --layout=cluster: exit status 7 (301.425295ms)

-- stdout --
	{"Name":"insufficient-storage-227630","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","Step":"Creating Container","StepDetail":"Creating docker container (CPUs=2, Memory=2048MB) ...","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-227630","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

-- /stdout --
** stderr ** 
	E0916 11:19:33.934065 1521957 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-227630" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig

** /stderr **
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-227630 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-227630 --output=json --layout=cluster: exit status 7 (310.545283ms)

-- stdout --
	{"Name":"insufficient-storage-227630","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-227630","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

-- /stdout --
** stderr ** 
	E0916 11:19:34.247977 1522019 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-227630" does not appear in /home/jenkins/minikube-integration/19651-1378450/kubeconfig
	E0916 11:19:34.258839 1522019 status.go:560] unable to read event log: stat: stat /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/insufficient-storage-227630/events.json: no such file or directory

** /stderr **
helpers_test.go:175: Cleaning up "insufficient-storage-227630" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p insufficient-storage-227630
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p insufficient-storage-227630: (1.940388268s)
--- PASS: TestInsufficientStorage (10.75s)
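
The stdout above is a stream of line-delimited CloudEvents: every line carries specversion, id, source, type, and a string-valued data map, and the final io.k8s.sigs.minikube.error event adds exitcode, advice, and message keys. A small decoder sketch under those assumptions (pipe the `--output=json` stream into stdin):

	package main

	import (
		"bufio"
		"encoding/json"
		"fmt"
		"os"
		"strings"
	)

	// event mirrors the fields visible in the JSON lines above.
	type event struct {
		ID     string            `json:"id"`
		Source string            `json:"source"`
		Type   string            `json:"type"`
		Data   map[string]string `json:"data"`
	}

	func main() {
		sc := bufio.NewScanner(os.Stdin)
		sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // error events can be long lines
		for sc.Scan() {
			var e event
			if err := json.Unmarshal(sc.Bytes(), &e); err != nil {
				continue // skip anything that is not a JSON event
			}
			if strings.HasSuffix(e.Type, ".error") {
				fmt.Printf("exit %s: %s\n", e.Data["exitcode"], e.Data["message"])
			}
		}
	}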

TestRunningBinaryUpgrade (66.46s)

=== RUN   TestRunningBinaryUpgrade
=== PAUSE TestRunningBinaryUpgrade

=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:120: (dbg) Run:  /tmp/minikube-v1.26.0.2723837247 start -p running-upgrade-664813 --memory=2200 --vm-driver=docker  --container-runtime=crio
version_upgrade_test.go:120: (dbg) Done: /tmp/minikube-v1.26.0.2723837247 start -p running-upgrade-664813 --memory=2200 --vm-driver=docker  --container-runtime=crio: (37.378044255s)
version_upgrade_test.go:130: (dbg) Run:  out/minikube-linux-arm64 start -p running-upgrade-664813 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
version_upgrade_test.go:130: (dbg) Done: out/minikube-linux-arm64 start -p running-upgrade-664813 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (24.851779528s)
helpers_test.go:175: Cleaning up "running-upgrade-664813" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p running-upgrade-664813
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p running-upgrade-664813: (3.488824684s)
--- PASS: TestRunningBinaryUpgrade (66.46s)

TestMissingContainerUpgrade (195.04s)

=== RUN   TestMissingContainerUpgrade
=== PAUSE TestMissingContainerUpgrade

=== CONT  TestMissingContainerUpgrade
version_upgrade_test.go:309: (dbg) Run:  /tmp/minikube-v1.26.0.1166352422 start -p missing-upgrade-304524 --memory=2200 --driver=docker  --container-runtime=crio
version_upgrade_test.go:309: (dbg) Done: /tmp/minikube-v1.26.0.1166352422 start -p missing-upgrade-304524 --memory=2200 --driver=docker  --container-runtime=crio: (1m45.643738351s)
version_upgrade_test.go:318: (dbg) Run:  docker stop missing-upgrade-304524
version_upgrade_test.go:318: (dbg) Done: docker stop missing-upgrade-304524: (1.897802563s)
version_upgrade_test.go:323: (dbg) Run:  docker rm missing-upgrade-304524
version_upgrade_test.go:329: (dbg) Run:  out/minikube-linux-arm64 start -p missing-upgrade-304524 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
version_upgrade_test.go:329: (dbg) Done: out/minikube-linux-arm64 start -p missing-upgrade-304524 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (1m24.744841937s)
helpers_test.go:175: Cleaning up "missing-upgrade-304524" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p missing-upgrade-304524
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p missing-upgrade-304524: (1.965669564s)
--- PASS: TestMissingContainerUpgrade (195.04s)

TestPause/serial/Start (88.37s)

=== RUN   TestPause/serial/Start
pause_test.go:80: (dbg) Run:  out/minikube-linux-arm64 start -p pause-839428 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=crio
pause_test.go:80: (dbg) Done: out/minikube-linux-arm64 start -p pause-839428 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=crio: (1m28.366214851s)
--- PASS: TestPause/serial/Start (88.37s)

TestNoKubernetes/serial/StartNoK8sWithVersion (0.12s)

=== RUN   TestNoKubernetes/serial/StartNoK8sWithVersion
no_kubernetes_test.go:83: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=crio
no_kubernetes_test.go:83: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=crio: exit status 14 (120.142725ms)

-- stdout --
	* [NoKubernetes-161840] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-1378450/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-1378450/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

-- /stdout --
** stderr ** 
	X Exiting due to MK_USAGE: cannot specify --kubernetes-version with --no-kubernetes,
	to unset a global config run:
	
	$ minikube config unset kubernetes-version

** /stderr **
--- PASS: TestNoKubernetes/serial/StartNoK8sWithVersion (0.12s)

TestNoKubernetes/serial/StartWithK8s (42s)

=== RUN   TestNoKubernetes/serial/StartWithK8s
no_kubernetes_test.go:95: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-161840 --driver=docker  --container-runtime=crio
no_kubernetes_test.go:95: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-161840 --driver=docker  --container-runtime=crio: (41.610446309s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-161840 status -o json
--- PASS: TestNoKubernetes/serial/StartWithK8s (42.00s)

TestNoKubernetes/serial/StartWithStopK8s (7.8s)

=== RUN   TestNoKubernetes/serial/StartWithStopK8s
no_kubernetes_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --driver=docker  --container-runtime=crio
no_kubernetes_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --driver=docker  --container-runtime=crio: (5.546853397s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-161840 status -o json
no_kubernetes_test.go:200: (dbg) Non-zero exit: out/minikube-linux-arm64 -p NoKubernetes-161840 status -o json: exit status 2 (308.574338ms)

-- stdout --
	{"Name":"NoKubernetes-161840","Host":"Running","Kubelet":"Stopped","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}

-- /stdout --
no_kubernetes_test.go:124: (dbg) Run:  out/minikube-linux-arm64 delete -p NoKubernetes-161840
no_kubernetes_test.go:124: (dbg) Done: out/minikube-linux-arm64 delete -p NoKubernetes-161840: (1.939579279s)
--- PASS: TestNoKubernetes/serial/StartWithStopK8s (7.80s)
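
The `status -o json` shape above is flat and stable per profile, so a small struct is enough to decode it. A sketch with field names taken verbatim from the output shown; the sample literal below is the exact line from this run:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type profileStatus struct {
		Name       string
		Host       string
		Kubelet    string
		APIServer  string
		Kubeconfig string
		Worker     bool
	}

	func main() {
		raw := `{"Name":"NoKubernetes-161840","Host":"Running","Kubelet":"Stopped","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}`
		var st profileStatus
		if err := json.Unmarshal([]byte(raw), &st); err != nil {
			panic(err)
		}
		// Host stays Running while Kubelet/APIServer are Stopped, which is
		// exactly the --no-kubernetes state this test asserts.
		fmt.Printf("%s: host=%s kubelet=%s apiserver=%s\n",
			st.Name, st.Host, st.Kubelet, st.APIServer)
	}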

TestNoKubernetes/serial/Start (6.36s)

=== RUN   TestNoKubernetes/serial/Start
no_kubernetes_test.go:136: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --driver=docker  --container-runtime=crio
no_kubernetes_test.go:136: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-161840 --no-kubernetes --driver=docker  --container-runtime=crio: (6.362180036s)
--- PASS: TestNoKubernetes/serial/Start (6.36s)

TestNoKubernetes/serial/VerifyK8sNotRunning (0.28s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunning
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-161840 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-161840 "sudo systemctl is-active --quiet service kubelet": exit status 1 (276.926506ms)

** stderr ** 
	ssh: Process exited with status 3

** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunning (0.28s)

TestNoKubernetes/serial/ProfileList (0.96s)

=== RUN   TestNoKubernetes/serial/ProfileList
no_kubernetes_test.go:169: (dbg) Run:  out/minikube-linux-arm64 profile list
no_kubernetes_test.go:179: (dbg) Run:  out/minikube-linux-arm64 profile list --output=json
--- PASS: TestNoKubernetes/serial/ProfileList (0.96s)

TestNoKubernetes/serial/Stop (1.21s)

=== RUN   TestNoKubernetes/serial/Stop
no_kubernetes_test.go:158: (dbg) Run:  out/minikube-linux-arm64 stop -p NoKubernetes-161840
no_kubernetes_test.go:158: (dbg) Done: out/minikube-linux-arm64 stop -p NoKubernetes-161840: (1.213062157s)
--- PASS: TestNoKubernetes/serial/Stop (1.21s)

TestNoKubernetes/serial/StartNoArgs (7.66s)

=== RUN   TestNoKubernetes/serial/StartNoArgs
no_kubernetes_test.go:191: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-161840 --driver=docker  --container-runtime=crio
no_kubernetes_test.go:191: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-161840 --driver=docker  --container-runtime=crio: (7.662676063s)
--- PASS: TestNoKubernetes/serial/StartNoArgs (7.66s)

TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.29s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunningSecond
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-161840 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-161840 "sudo systemctl is-active --quiet service kubelet": exit status 1 (286.307704ms)

** stderr ** 
	ssh: Process exited with status 3

** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.29s)

TestPause/serial/SecondStartNoReconfiguration (19.61s)

=== RUN   TestPause/serial/SecondStartNoReconfiguration
pause_test.go:92: (dbg) Run:  out/minikube-linux-arm64 start -p pause-839428 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
pause_test.go:92: (dbg) Done: out/minikube-linux-arm64 start -p pause-839428 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (19.595793828s)
--- PASS: TestPause/serial/SecondStartNoReconfiguration (19.61s)

TestPause/serial/Pause (1.11s)

=== RUN   TestPause/serial/Pause
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-839428 --alsologtostderr -v=5
pause_test.go:110: (dbg) Done: out/minikube-linux-arm64 pause -p pause-839428 --alsologtostderr -v=5: (1.109997424s)
--- PASS: TestPause/serial/Pause (1.11s)

TestPause/serial/VerifyStatus (0.35s)

=== RUN   TestPause/serial/VerifyStatus
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p pause-839428 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p pause-839428 --output=json --layout=cluster: exit status 2 (347.758877ms)

-- stdout --
	{"Name":"pause-839428","StatusCode":418,"StatusName":"Paused","Step":"Done","StepDetail":"* Paused 7 containers in: kube-system, kubernetes-dashboard, storage-gluster, istio-operator","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":200,"StatusName":"OK"}},"Nodes":[{"Name":"pause-839428","StatusCode":200,"StatusName":"OK","Components":{"apiserver":{"Name":"apiserver","StatusCode":418,"StatusName":"Paused"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

-- /stdout --
--- PASS: TestPause/serial/VerifyStatus (0.35s)
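
The `--layout=cluster` form above is richer, reporting HTTP-style status codes (200 OK, 405 Stopped, 418 Paused, 507 InsufficientStorage) at the cluster, node, and component level. A decoder sketch matching the fields visible here and in the TestInsufficientStorage output; Step, StepDetail, and StatusDetail appear only when relevant, and the sample literal is abridged from the line above:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type component struct {
		Name       string
		StatusCode int
		StatusName string
	}

	type node struct {
		Name       string
		StatusCode int
		StatusName string
		Components map[string]component
	}

	type clusterStatus struct {
		Name          string
		StatusCode    int
		StatusName    string
		StatusDetail  string
		Step          string
		StepDetail    string
		BinaryVersion string
		Components    map[string]component
		Nodes         []node
	}

	func main() {
		raw := `{"Name":"pause-839428","StatusCode":418,"StatusName":"Paused","Step":"Done","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":200,"StatusName":"OK"}},"Nodes":[{"Name":"pause-839428","StatusCode":200,"StatusName":"OK","Components":{"apiserver":{"Name":"apiserver","StatusCode":418,"StatusName":"Paused"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}`
		var cs clusterStatus
		if err := json.Unmarshal([]byte(raw), &cs); err != nil {
			panic(err)
		}
		fmt.Printf("%s is %s (%d); node %s apiserver=%s\n",
			cs.Name, cs.StatusName, cs.StatusCode,
			cs.Nodes[0].Name, cs.Nodes[0].Components["apiserver"].StatusName)
	}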

TestPause/serial/Unpause (0.82s)

=== RUN   TestPause/serial/Unpause
pause_test.go:121: (dbg) Run:  out/minikube-linux-arm64 unpause -p pause-839428 --alsologtostderr -v=5
--- PASS: TestPause/serial/Unpause (0.82s)

TestPause/serial/PauseAgain (0.95s)

=== RUN   TestPause/serial/PauseAgain
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-839428 --alsologtostderr -v=5
--- PASS: TestPause/serial/PauseAgain (0.95s)

TestPause/serial/DeletePaused (2.74s)

=== RUN   TestPause/serial/DeletePaused
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p pause-839428 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p pause-839428 --alsologtostderr -v=5: (2.737548789s)
--- PASS: TestPause/serial/DeletePaused (2.74s)

TestPause/serial/VerifyDeletedResources (0.14s)

=== RUN   TestPause/serial/VerifyDeletedResources
pause_test.go:142: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
pause_test.go:168: (dbg) Run:  docker ps -a
pause_test.go:173: (dbg) Run:  docker volume inspect pause-839428
pause_test.go:173: (dbg) Non-zero exit: docker volume inspect pause-839428: exit status 1 (18.939879ms)

-- stdout --
	[]

-- /stdout --
** stderr ** 
	Error response from daemon: get pause-839428: no such volume

** /stderr **
pause_test.go:178: (dbg) Run:  docker network ls
--- PASS: TestPause/serial/VerifyDeletedResources (0.14s)

TestStoppedBinaryUpgrade/Setup (0.93s)

=== RUN   TestStoppedBinaryUpgrade/Setup
--- PASS: TestStoppedBinaryUpgrade/Setup (0.93s)

TestStoppedBinaryUpgrade/Upgrade (70.27s)

=== RUN   TestStoppedBinaryUpgrade/Upgrade
version_upgrade_test.go:183: (dbg) Run:  /tmp/minikube-v1.26.0.906002893 start -p stopped-upgrade-928314 --memory=2200 --vm-driver=docker  --container-runtime=crio
E0916 11:24:17.495128 1383833 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-1378450/.minikube/profiles/functional-919910/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:183: (dbg) Done: /tmp/minikube-v1.26.0.906002893 start -p stopped-upgrade-928314 --memory=2200 --vm-driver=docker  --container-runtime=crio: (36.646255946s)
version_upgrade_test.go:192: (dbg) Run:  /tmp/minikube-v1.26.0.906002893 -p stopped-upgrade-928314 stop
version_upgrade_test.go:192: (dbg) Done: /tmp/minikube-v1.26.0.906002893 -p stopped-upgrade-928314 stop: (2.877403663s)
version_upgrade_test.go:198: (dbg) Run:  out/minikube-linux-arm64 start -p stopped-upgrade-928314 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio
version_upgrade_test.go:198: (dbg) Done: out/minikube-linux-arm64 start -p stopped-upgrade-928314 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=crio: (30.747414618s)
--- PASS: TestStoppedBinaryUpgrade/Upgrade (70.27s)

TestStoppedBinaryUpgrade/MinikubeLogs (1.31s)

=== RUN   TestStoppedBinaryUpgrade/MinikubeLogs
version_upgrade_test.go:206: (dbg) Run:  out/minikube-linux-arm64 logs -p stopped-upgrade-928314
version_upgrade_test.go:206: (dbg) Done: out/minikube-linux-arm64 logs -p stopped-upgrade-928314: (1.311326074s)
--- PASS: TestStoppedBinaryUpgrade/MinikubeLogs (1.31s)


Test skip (27/229)

TestDownloadOnly/v1.20.0/cached-images (0s)

=== RUN   TestDownloadOnly/v1.20.0/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.20.0/cached-images (0.00s)

TestDownloadOnly/v1.20.0/binaries (0s)

=== RUN   TestDownloadOnly/v1.20.0/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.20.0/binaries (0.00s)

TestDownloadOnly/v1.20.0/kubectl (0s)

=== RUN   TestDownloadOnly/v1.20.0/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.20.0/kubectl (0.00s)

TestDownloadOnly/v1.31.1/cached-images (0s)

=== RUN   TestDownloadOnly/v1.31.1/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.31.1/cached-images (0.00s)

TestDownloadOnly/v1.31.1/binaries (0s)

=== RUN   TestDownloadOnly/v1.31.1/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.31.1/binaries (0.00s)

TestDownloadOnly/v1.31.1/kubectl (0s)

=== RUN   TestDownloadOnly/v1.31.1/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.31.1/kubectl (0.00s)

TestDownloadOnlyKic (0.65s)

=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:232: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p download-docker-880503 --alsologtostderr --driver=docker  --container-runtime=crio
aaa_download_only_test.go:244: Skip for arm64 platform. See https://github.com/kubernetes/minikube/issues/10144
helpers_test.go:175: Cleaning up "download-docker-880503" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p download-docker-880503
--- SKIP: TestDownloadOnlyKic (0.65s)

TestOffline (0s)

=== RUN   TestOffline
=== PAUSE TestOffline

=== CONT  TestOffline
aab_offline_test.go:35: skipping TestOffline - only docker runtime supported on arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestOffline (0.00s)

TestAddons/serial/Volcano (0s)

=== RUN   TestAddons/serial/Volcano
addons_test.go:879: skipping: crio not supported
--- SKIP: TestAddons/serial/Volcano (0.00s)

TestAddons/parallel/HelmTiller (0s)

=== RUN   TestAddons/parallel/HelmTiller
=== PAUSE TestAddons/parallel/HelmTiller

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:446: skip Helm test on arm64
--- SKIP: TestAddons/parallel/HelmTiller (0.00s)

TestAddons/parallel/Olm (0s)

=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm

=== CONT  TestAddons/parallel/Olm
addons_test.go:500: Skipping OLM addon test until https://github.com/operator-framework/operator-lifecycle-manager/issues/2534 is resolved
--- SKIP: TestAddons/parallel/Olm (0.00s)

TestDockerFlags (0s)

=== RUN   TestDockerFlags
docker_test.go:41: skipping: only runs with docker container runtime, currently testing crio
--- SKIP: TestDockerFlags (0.00s)

TestDockerEnvContainerd (0s)

=== RUN   TestDockerEnvContainerd
docker_test.go:170: running with crio true linux arm64
docker_test.go:172: skipping: TestDockerEnvContainerd can only be run with the containerd runtime on Docker driver
--- SKIP: TestDockerEnvContainerd (0.00s)

TestKVMDriverInstallOrUpdate (0s)

=== RUN   TestKVMDriverInstallOrUpdate
driver_install_or_update_test.go:45: Skip if arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestKVMDriverInstallOrUpdate (0.00s)

TestHyperKitDriverInstallOrUpdate (0s)

=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:105: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

TestHyperkitDriverSkipUpgrade (0s)

=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:169: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

TestFunctional/parallel/MySQL (0s)

=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL

=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1787: arm64 is not supported by mysql. Skip the test. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestFunctional/parallel/MySQL (0.00s)

TestFunctional/parallel/DockerEnv (0s)

=== RUN   TestFunctional/parallel/DockerEnv
=== PAUSE TestFunctional/parallel/DockerEnv

=== CONT  TestFunctional/parallel/DockerEnv
functional_test.go:463: only validate docker env with docker container runtime, currently testing crio
--- SKIP: TestFunctional/parallel/DockerEnv (0.00s)

TestFunctional/parallel/PodmanEnv (0s)

=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv

=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:550: only validate podman env with docker container runtime, currently testing crio
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.00s)

TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

TestGvisorAddon (0s)

=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

TestImageBuild (0s)

=== RUN   TestImageBuild
image_test.go:33: 
--- SKIP: TestImageBuild (0.00s)

TestChangeNoneUser (0s)

=== RUN   TestChangeNoneUser
none_test.go:38: Test requires none driver and SUDO_USER env to not be empty
--- SKIP: TestChangeNoneUser (0.00s)

TestScheduledStopWindows (0s)

=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:42: test only runs on windows
--- SKIP: TestScheduledStopWindows (0.00s)

TestSkaffold (0s)

=== RUN   TestSkaffold
skaffold_test.go:45: skaffold requires docker-env, currently testing crio container runtime
--- SKIP: TestSkaffold (0.00s)
